-rw-r--r--  .drone.yml | 2
-rw-r--r--  .gitattributes | 2
-rw-r--r--  Makefile | 7
-rw-r--r--  build/generate-bindata.go (renamed from scripts/generate-bindata.go) | 0
-rw-r--r--  build/generate-gitignores.go (renamed from scripts/generate-gitignores.go) | 0
-rw-r--r--  build/generate-licenses.go (renamed from scripts/generate-licenses.go) | 0
-rw-r--r--  build/lint.go | 325
-rwxr-xr-x  build/update-locales.sh (renamed from scripts/update-locales.sh) | 0
-rw-r--r--  build/vendor.go | 18
-rw-r--r--  go.mod | 9
-rw-r--r--  go.sum | 30
-rw-r--r--  main.go | 4
-rw-r--r--  modules/options/options_bindata.go | 2
-rw-r--r--  modules/public/public_bindata.go | 2
-rw-r--r--  modules/templates/templates_bindata.go | 2
-rw-r--r--  vendor/github.com/fatih/color/LICENSE.md | 20
-rw-r--r--  vendor/github.com/fatih/color/README.md | 182
-rw-r--r--  vendor/github.com/fatih/color/color.go | 603
-rw-r--r--  vendor/github.com/fatih/color/doc.go | 133
-rw-r--r--  vendor/github.com/fatih/color/go.mod | 8
-rw-r--r--  vendor/github.com/fatih/color/go.sum | 8
-rw-r--r--  vendor/github.com/fatih/structtag/LICENSE | 60
-rw-r--r--  vendor/github.com/fatih/structtag/README.md | 73
-rw-r--r--  vendor/github.com/fatih/structtag/go.mod | 3
-rw-r--r--  vendor/github.com/fatih/structtag/tags.go | 315
-rw-r--r--  vendor/github.com/mattn/go-colorable/.travis.yml | 9
-rw-r--r--  vendor/github.com/mattn/go-colorable/LICENSE | 21
-rw-r--r--  vendor/github.com/mattn/go-colorable/README.md | 48
-rw-r--r--  vendor/github.com/mattn/go-colorable/colorable_appengine.go | 29
-rw-r--r--  vendor/github.com/mattn/go-colorable/colorable_others.go | 30
-rw-r--r--  vendor/github.com/mattn/go-colorable/colorable_windows.go | 1005
-rw-r--r--  vendor/github.com/mattn/go-colorable/go.mod | 3
-rw-r--r--  vendor/github.com/mattn/go-colorable/go.sum | 4
-rw-r--r--  vendor/github.com/mattn/go-colorable/noncolorable.go | 55
-rw-r--r--  vendor/github.com/mattn/go-isatty/go.mod | 4
-rw-r--r--  vendor/github.com/mattn/go-isatty/go.sum | 4
-rw-r--r--  vendor/github.com/mattn/go-isatty/isatty_others.go | 2
-rw-r--r--  vendor/github.com/mattn/go-isatty/isatty_plan9.go | 22
-rw-r--r--  vendor/github.com/mattn/go-isatty/isatty_tcgets.go (renamed from vendor/github.com/mattn/go-isatty/isatty_linux.go) | 2
-rw-r--r--  vendor/github.com/mattn/go-isatty/isatty_windows.go | 39
-rw-r--r--  vendor/github.com/mattn/go-runewidth/.travis.yml | 8
-rw-r--r--  vendor/github.com/mattn/go-runewidth/LICENSE | 21
-rw-r--r--  vendor/github.com/mattn/go-runewidth/README.mkd | 27
-rw-r--r--  vendor/github.com/mattn/go-runewidth/go.mod | 3
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth.go | 258
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth_appengine.go | 8
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth_js.go | 9
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth_posix.go | 79
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth_table.go | 427
-rw-r--r--  vendor/github.com/mattn/go-runewidth/runewidth_windows.go | 28
-rw-r--r--  vendor/github.com/mgechev/dots/.travis.yml | 2
-rw-r--r--  vendor/github.com/mgechev/dots/LICENSE | 21
-rw-r--r--  vendor/github.com/mgechev/dots/README.md | 100
-rw-r--r--  vendor/github.com/mgechev/dots/resolve.go | 456
-rw-r--r--  vendor/github.com/mgechev/revive/LICENSE | 21
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/checkstyle.go | 76
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/default.go | 26
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/friendly.go | 146
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/json.go | 40
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/ndjson.go | 34
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/plain.go | 26
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/severity.go | 13
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/stylish.go | 89
-rw-r--r--  vendor/github.com/mgechev/revive/formatter/unix.go | 27
-rw-r--r--  vendor/github.com/mgechev/revive/lint/config.go | 32
-rw-r--r--  vendor/github.com/mgechev/revive/lint/failure.go | 39
-rw-r--r--  vendor/github.com/mgechev/revive/lint/file.go | 278
-rw-r--r--  vendor/github.com/mgechev/revive/lint/formatter.go | 14
-rw-r--r--  vendor/github.com/mgechev/revive/lint/linter.go | 99
-rw-r--r--  vendor/github.com/mgechev/revive/lint/package.go | 178
-rw-r--r--  vendor/github.com/mgechev/revive/lint/rule.go | 31
-rw-r--r--  vendor/github.com/mgechev/revive/lint/utils.go | 128
-rw-r--r--  vendor/github.com/mgechev/revive/rule/add-constant.go | 151
-rw-r--r--  vendor/github.com/mgechev/revive/rule/argument-limit.go | 67
-rw-r--r--  vendor/github.com/mgechev/revive/rule/atomic.go | 94
-rw-r--r--  vendor/github.com/mgechev/revive/rule/bare-return.go | 84
-rw-r--r--  vendor/github.com/mgechev/revive/rule/blank-imports.go | 74
-rw-r--r--  vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go | 73
-rw-r--r--  vendor/github.com/mgechev/revive/rule/call-to-gc.go | 70
-rw-r--r--  vendor/github.com/mgechev/revive/rule/cognitive-complexity.go | 195
-rw-r--r--  vendor/github.com/mgechev/revive/rule/confusing-naming.go | 190
-rw-r--r--  vendor/github.com/mgechev/revive/rule/confusing-results.go | 67
-rw-r--r--  vendor/github.com/mgechev/revive/rule/constant-logical-expr.go | 88
-rw-r--r--  vendor/github.com/mgechev/revive/rule/context-as-argument.go | 60
-rw-r--r--  vendor/github.com/mgechev/revive/rule/context-keys-type.go | 81
-rw-r--r--  vendor/github.com/mgechev/revive/rule/cyclomatic.go | 115
-rw-r--r--  vendor/github.com/mgechev/revive/rule/deep-exit.go | 94
-rw-r--r--  vendor/github.com/mgechev/revive/rule/dot-imports.go | 54
-rw-r--r--  vendor/github.com/mgechev/revive/rule/duplicated-imports.go | 39
-rw-r--r--  vendor/github.com/mgechev/revive/rule/empty-block.go | 76
-rw-r--r--  vendor/github.com/mgechev/revive/rule/empty-lines.go | 113
-rw-r--r--  vendor/github.com/mgechev/revive/rule/error-naming.go | 79
-rw-r--r--  vendor/github.com/mgechev/revive/rule/error-return.go | 67
-rw-r--r--  vendor/github.com/mgechev/revive/rule/error-strings.go | 98
-rw-r--r--  vendor/github.com/mgechev/revive/rule/errorf.go | 93
-rw-r--r--  vendor/github.com/mgechev/revive/rule/exported.go | 272
-rw-r--r--  vendor/github.com/mgechev/revive/rule/file-header.go | 69
-rw-r--r--  vendor/github.com/mgechev/revive/rule/flag-param.go | 104
-rw-r--r--  vendor/github.com/mgechev/revive/rule/function-result-limit.go | 68
-rw-r--r--  vendor/github.com/mgechev/revive/rule/get-return.go | 70
-rw-r--r--  vendor/github.com/mgechev/revive/rule/if-return.go | 115
-rw-r--r--  vendor/github.com/mgechev/revive/rule/import-shadowing.go | 102
-rw-r--r--  vendor/github.com/mgechev/revive/rule/imports-blacklist.go | 52
-rw-r--r--  vendor/github.com/mgechev/revive/rule/increment-decrement.go | 74
-rw-r--r--  vendor/github.com/mgechev/revive/rule/indent-error-flow.go | 78
-rw-r--r--  vendor/github.com/mgechev/revive/rule/line-length-limit.go | 84
-rw-r--r--  vendor/github.com/mgechev/revive/rule/max-public-structs.go | 67
-rw-r--r--  vendor/github.com/mgechev/revive/rule/modifies-param.go | 80
-rw-r--r--  vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go | 134
-rw-r--r--  vendor/github.com/mgechev/revive/rule/package-comments.go | 121
-rw-r--r--  vendor/github.com/mgechev/revive/rule/range-val-address.go | 113
-rw-r--r--  vendor/github.com/mgechev/revive/rule/range-val-in-closure.go | 111
-rw-r--r--  vendor/github.com/mgechev/revive/rule/range.go | 82
-rw-r--r--  vendor/github.com/mgechev/revive/rule/receiver-naming.go | 81
-rw-r--r--  vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go | 145
-rw-r--r--  vendor/github.com/mgechev/revive/rule/string-of-int.go | 95
-rw-r--r--  vendor/github.com/mgechev/revive/rule/struct-tag.go | 236
-rw-r--r--  vendor/github.com/mgechev/revive/rule/superfluous-else.go | 114
-rw-r--r--  vendor/github.com/mgechev/revive/rule/time-naming.go | 93
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unexported-return.go | 106
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unhandled-error.go | 120
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go | 107
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unreachable-code.go | 114
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unused-param.go | 102
-rw-r--r--  vendor/github.com/mgechev/revive/rule/unused-receiver.go | 77
-rw-r--r--  vendor/github.com/mgechev/revive/rule/utils.go | 191
-rw-r--r--  vendor/github.com/mgechev/revive/rule/var-declarations.go | 120
-rw-r--r--  vendor/github.com/mgechev/revive/rule/var-naming.go | 230
-rw-r--r--  vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go | 66
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/.gitignore | 15
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/.travis.yml | 14
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/LICENSE.md | 19
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/README.md | 396
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/csv.go | 52
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/go.mod | 5
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/go.sum | 2
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/table.go | 941
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/table_with_color.go | 136
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/util.go | 93
-rw-r--r--  vendor/github.com/olekukonko/tablewriter/wrap.go | 99
-rw-r--r--  vendor/github.com/pkg/errors/.travis.yml | 11
-rw-r--r--  vendor/github.com/pkg/errors/Makefile | 44
-rw-r--r--  vendor/github.com/pkg/errors/README.md | 11
-rw-r--r--  vendor/github.com/pkg/errors/errors.go | 8
-rw-r--r--  vendor/github.com/pkg/errors/go113.go | 38
-rw-r--r--  vendor/github.com/pkg/errors/stack.go | 58
-rw-r--r--  vendor/golang.org/x/mod/LICENSE | 27
-rw-r--r--  vendor/golang.org/x/mod/PATENTS | 22
-rw-r--r--  vendor/golang.org/x/mod/module/module.go (renamed from vendor/golang.org/x/tools/internal/module/module.go) | 390
-rw-r--r--  vendor/golang.org/x/mod/semver/semver.go (renamed from vendor/golang.org/x/tools/internal/semver/semver.go) | 4
-rw-r--r--  vendor/golang.org/x/tools/go/ast/astutil/imports.go | 5
-rw-r--r--  vendor/golang.org/x/tools/go/internal/cgo/cgo.go | 6
-rw-r--r--  vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go | 8
-rw-r--r--  vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go | 102
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist.go | 692
-rw-r--r--  vendor/golang.org/x/tools/go/packages/golist_overlay.go | 201
-rw-r--r--  vendor/golang.org/x/tools/go/packages/packages.go | 44
-rw-r--r--  vendor/golang.org/x/tools/imports/forward.go | 5
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go | 10
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go | 2
-rw-r--r--  vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go | 2
-rw-r--r--  vendor/golang.org/x/tools/internal/gocommand/invoke.go | 121
-rw-r--r--  vendor/golang.org/x/tools/internal/gopathwalk/walk.go | 11
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/fix.go | 634
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/imports.go | 30
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod.go | 283
-rw-r--r--  vendor/golang.org/x/tools/internal/imports/mod_cache.go | 95
-rw-r--r--  vendor/golang.org/x/tools/internal/packagesinternal/packages.go | 27
-rw-r--r--  vendor/golang.org/x/xerrors/LICENSE | 27
-rw-r--r--  vendor/golang.org/x/xerrors/PATENTS | 22
-rw-r--r--  vendor/golang.org/x/xerrors/README | 2
-rw-r--r--  vendor/golang.org/x/xerrors/adaptor.go | 193
-rw-r--r--  vendor/golang.org/x/xerrors/codereview.cfg | 1
-rw-r--r--  vendor/golang.org/x/xerrors/doc.go | 22
-rw-r--r--  vendor/golang.org/x/xerrors/errors.go | 33
-rw-r--r--  vendor/golang.org/x/xerrors/fmt.go | 109
-rw-r--r--  vendor/golang.org/x/xerrors/format.go | 34
-rw-r--r--  vendor/golang.org/x/xerrors/frame.go | 56
-rw-r--r--  vendor/golang.org/x/xerrors/go.mod | 3
-rw-r--r--  vendor/golang.org/x/xerrors/internal/internal.go | 8
-rw-r--r--  vendor/golang.org/x/xerrors/wrap.go | 106
-rw-r--r--  vendor/modules.txt | 37
182 files changed, 15833 insertions, 1227 deletions
diff --git a/.drone.yml b/.drone.yml
index 8dd673f19e..ee23f1ad08 100644
--- a/.drone.yml
+++ b/.drone.yml
@@ -339,7 +339,7 @@ steps:
pull: default
image: alpine:3.11
commands:
- - ./scripts/update-locales.sh
+ - ./build/update-locales.sh
- name: push
pull: always
diff --git a/.gitattributes b/.gitattributes
index 9024eba583..f76f5a6382 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -6,5 +6,5 @@ conf/* linguist-vendored
docker/* linguist-vendored
options/* linguist-vendored
public/* linguist-vendored
-scripts/* linguist-vendored
+build/* linguist-vendored
templates/* linguist-vendored
diff --git a/Makefile b/Makefile
index f34658d570..c78be87558 100644
--- a/Makefile
+++ b/Makefile
@@ -80,7 +80,7 @@ TAGS ?=
TAGS_SPLIT := $(subst $(COMMA), ,$(TAGS))
TAGS_EVIDENCE := $(MAKE_EVIDENCE_DIR)/tags
-GO_DIRS := cmd integrations models modules routers scripts services vendor
+GO_DIRS := cmd integrations models modules routers build services vendor
GO_SOURCES := $(wildcard *.go)
GO_SOURCES += $(shell find $(GO_DIRS) -type f -name "*.go" -not -path modules/options/bindata.go -not -path modules/public/bindata.go -not -path modules/templates/bindata.go)
@@ -234,10 +234,7 @@ errcheck:
.PHONY: revive
revive:
- @hash revive > /dev/null 2>&1; if [ $$? -ne 0 ]; then \
- $(GO) get -u github.com/mgechev/revive; \
- fi
- revive -config .revive.toml -exclude=./vendor/... ./... || exit 1
+ GO111MODULE=on $(GO) run -mod=vendor build/lint.go -config .revive.toml -exclude=./vendor/... ./... || exit 1
.PHONY: misspell-check
misspell-check:
diff --git a/scripts/generate-bindata.go b/build/generate-bindata.go
index fa1669fcf9..fa1669fcf9 100644
--- a/scripts/generate-bindata.go
+++ b/build/generate-bindata.go
diff --git a/scripts/generate-gitignores.go b/build/generate-gitignores.go
index 0f56ff3a89..0f56ff3a89 100644
--- a/scripts/generate-gitignores.go
+++ b/build/generate-gitignores.go
diff --git a/scripts/generate-licenses.go b/build/generate-licenses.go
index 15db19e70a..15db19e70a 100644
--- a/scripts/generate-licenses.go
+++ b/build/generate-licenses.go
diff --git a/build/lint.go b/build/lint.go
new file mode 100644
index 0000000000..bc6ddbec41
--- /dev/null
+++ b/build/lint.go
@@ -0,0 +1,325 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright (c) 2018 Minko Gechev. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
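+//
+// This file is executed with "go run -mod=vendor build/lint.go" from the
+// Makefile "revive" target; the "ignore" build tag below keeps it out of
+// normal Gitea builds.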
+
+// +build ignore
+
+package main
+
+import (
+ "flag"
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/BurntSushi/toml"
+ "github.com/mgechev/dots"
+ "github.com/mgechev/revive/formatter"
+ "github.com/mgechev/revive/lint"
+ "github.com/mgechev/revive/rule"
+ "github.com/mitchellh/go-homedir"
+)
+
+func fail(err string) {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+}
+
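+// defaultRules mirrors revive's default rule set. allRules below adds the
+// optional rules so that any rule named in the TOML config can be resolved.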
+var defaultRules = []lint.Rule{
+ &rule.VarDeclarationsRule{},
+ &rule.PackageCommentsRule{},
+ &rule.DotImportsRule{},
+ &rule.BlankImportsRule{},
+ &rule.ExportedRule{},
+ &rule.VarNamingRule{},
+ &rule.IndentErrorFlowRule{},
+ &rule.IfReturnRule{},
+ &rule.RangeRule{},
+ &rule.ErrorfRule{},
+ &rule.ErrorNamingRule{},
+ &rule.ErrorStringsRule{},
+ &rule.ReceiverNamingRule{},
+ &rule.IncrementDecrementRule{},
+ &rule.ErrorReturnRule{},
+ &rule.UnexportedReturnRule{},
+ &rule.TimeNamingRule{},
+ &rule.ContextKeysType{},
+ &rule.ContextAsArgumentRule{},
+}
+
+var allRules = append([]lint.Rule{
+ &rule.ArgumentsLimitRule{},
+ &rule.CyclomaticRule{},
+ &rule.FileHeaderRule{},
+ &rule.EmptyBlockRule{},
+ &rule.SuperfluousElseRule{},
+ &rule.ConfusingNamingRule{},
+ &rule.GetReturnRule{},
+ &rule.ModifiesParamRule{},
+ &rule.ConfusingResultsRule{},
+ &rule.DeepExitRule{},
+ &rule.UnusedParamRule{},
+ &rule.UnreachableCodeRule{},
+ &rule.AddConstantRule{},
+ &rule.FlagParamRule{},
+ &rule.UnnecessaryStmtRule{},
+ &rule.StructTagRule{},
+ &rule.ModifiesValRecRule{},
+ &rule.ConstantLogicalExprRule{},
+ &rule.BoolLiteralRule{},
+ &rule.RedefinesBuiltinIDRule{},
+ &rule.ImportsBlacklistRule{},
+ &rule.FunctionResultsLimitRule{},
+ &rule.MaxPublicStructsRule{},
+ &rule.RangeValInClosureRule{},
+ &rule.RangeValAddress{},
+ &rule.WaitGroupByValueRule{},
+ &rule.AtomicRule{},
+ &rule.EmptyLinesRule{},
+ &rule.LineLengthLimitRule{},
+ &rule.CallToGCRule{},
+ &rule.DuplicatedImportsRule{},
+ &rule.ImportShadowingRule{},
+ &rule.BareReturnRule{},
+ &rule.UnusedReceiverRule{},
+ &rule.UnhandledErrorRule{},
+ &rule.CognitiveComplexityRule{},
+ &rule.StringOfIntRule{},
+}, defaultRules...)
+
+var allFormatters = []lint.Formatter{
+ &formatter.Stylish{},
+ &formatter.Friendly{},
+ &formatter.JSON{},
+ &formatter.NDJSON{},
+ &formatter.Default{},
+ &formatter.Unix{},
+ &formatter.Checkstyle{},
+ &formatter.Plain{},
+}
+
+func getFormatters() map[string]lint.Formatter {
+ result := map[string]lint.Formatter{}
+ for _, f := range allFormatters {
+ result[f.Name()] = f
+ }
+ return result
+}
+
+func getLintingRules(config *lint.Config) []lint.Rule {
+ rulesMap := map[string]lint.Rule{}
+ for _, r := range allRules {
+ rulesMap[r.Name()] = r
+ }
+
+ lintingRules := []lint.Rule{}
+ for name := range config.Rules {
+ rule, ok := rulesMap[name]
+ if !ok {
+ fail("cannot find rule: " + name)
+ }
+ lintingRules = append(lintingRules, rule)
+ }
+
+ return lintingRules
+}
+
+func parseConfig(path string) *lint.Config {
+ config := &lint.Config{}
+ file, err := ioutil.ReadFile(path)
+ if err != nil {
+ fail("cannot read the config file")
+ }
+ _, err = toml.Decode(string(file), config)
+ if err != nil {
+ fail("cannot parse the config file: " + err.Error())
+ }
+ return config
+}
+
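+// normalizeConfig fills in the default confidence threshold and propagates the
+// global severity to any rule or directive that does not set its own.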
+func normalizeConfig(config *lint.Config) {
+ if config.Confidence == 0 {
+ config.Confidence = 0.8
+ }
+ severity := config.Severity
+ if severity != "" {
+ for k, v := range config.Rules {
+ if v.Severity == "" {
+ v.Severity = severity
+ }
+ config.Rules[k] = v
+ }
+ for k, v := range config.Directives {
+ if v.Severity == "" {
+ v.Severity = severity
+ }
+ config.Directives[k] = v
+ }
+ }
+}
+
+func getConfig() *lint.Config {
+ config := defaultConfig()
+ if configPath != "" {
+ config = parseConfig(configPath)
+ }
+ normalizeConfig(config)
+ return config
+}
+
+func getFormatter() lint.Formatter {
+ formatters := getFormatters()
+ formatter := formatters["default"]
+ if formatterName != "" {
+ f, ok := formatters[formatterName]
+ if !ok {
+ fail("unknown formatter " + formatterName)
+ }
+ formatter = f
+ }
+ return formatter
+}
+
+func buildDefaultConfigPath() string {
+ var result string
+ if homeDir, err := homedir.Dir(); err == nil {
+ result = filepath.Join(homeDir, "revive.toml")
+ if _, err := os.Stat(result); err != nil {
+ result = ""
+ }
+ }
+
+ return result
+}
+
+func defaultConfig() *lint.Config {
+ defaultConfig := lint.Config{
+ Confidence: 0.0,
+ Severity: lint.SeverityWarning,
+ Rules: map[string]lint.RuleConfig{},
+ }
+ for _, r := range defaultRules {
+ defaultConfig.Rules[r.Name()] = lint.RuleConfig{}
+ }
+ return &defaultConfig
+}
+
+func normalizeSplit(strs []string) []string {
+ res := []string{}
+ for _, s := range strs {
+ t := strings.Trim(s, " \t")
+ if len(t) > 0 {
+ res = append(res, t)
+ }
+ }
+ return res
+}
+
+func getPackages() [][]string {
+ globs := normalizeSplit(flag.Args())
+ if len(globs) == 0 {
+ globs = append(globs, ".")
+ }
+
+ packages, err := dots.ResolvePackages(globs, normalizeSplit(excludePaths))
+ if err != nil {
+ fail(err.Error())
+ }
+
+ return packages
+}
+
+type arrayFlags []string
+
+func (i *arrayFlags) String() string {
+ return strings.Join([]string(*i), " ")
+}
+
+func (i *arrayFlags) Set(value string) error {
+ *i = append(*i, value)
+ return nil
+}
+
+var configPath string
+var excludePaths arrayFlags
+var formatterName string
+var help bool
+
+var originalUsage = flag.Usage
+
+func init() {
+ flag.Usage = func() {
+ originalUsage()
+ }
+ // command line help strings
+ const (
+ configUsage = "path to the configuration TOML file, defaults to $HOME/revive.toml, if present (i.e. -config myconf.toml)"
+ excludeUsage = "list of globs which specify files to be excluded (i.e. -exclude foo/...)"
+ formatterUsage = "formatter to be used for the output (i.e. -formatter stylish)"
+ )
+
+ defaultConfigPath := buildDefaultConfigPath()
+
+ flag.StringVar(&configPath, "config", defaultConfigPath, configUsage)
+ flag.Var(&excludePaths, "exclude", excludeUsage)
+ flag.StringVar(&formatterName, "formatter", "", formatterUsage)
+ flag.Parse()
+}
+
+func main() {
+ config := getConfig()
+ formatter := getFormatter()
+ packages := getPackages()
+
+ revive := lint.New(func(file string) ([]byte, error) {
+ return ioutil.ReadFile(file)
+ })
+
+ lintingRules := getLintingRules(config)
+
+ failures, err := revive.Lint(packages, lintingRules, *config)
+ if err != nil {
+ fail(err.Error())
+ }
+
+ formatChan := make(chan lint.Failure)
+ exitChan := make(chan bool)
+
+ var output string
+ go (func() {
+ output, err = formatter.Format(formatChan, *config)
+ if err != nil {
+ fail(err.Error())
+ }
+ exitChan <- true
+ })()
+
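+ // Filter failures by confidence and derive the exit code from the configured
+ // rule and directive severities before streaming them to the formatter.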
+ exitCode := 0
+ for f := range failures {
+ if f.Confidence < config.Confidence {
+ continue
+ }
+ if exitCode == 0 {
+ exitCode = config.WarningCode
+ }
+ if c, ok := config.Rules[f.RuleName]; ok && c.Severity == lint.SeverityError {
+ exitCode = config.ErrorCode
+ }
+ if c, ok := config.Directives[f.RuleName]; ok && c.Severity == lint.SeverityError {
+ exitCode = config.ErrorCode
+ }
+
+ formatChan <- f
+ }
+
+ close(formatChan)
+ <-exitChan
+ if output != "" {
+ fmt.Println(output)
+ }
+
+ os.Exit(exitCode)
+}
diff --git a/scripts/update-locales.sh b/build/update-locales.sh
index 2dad93513b..2dad93513b 100755
--- a/scripts/update-locales.sh
+++ b/build/update-locales.sh
diff --git a/build/vendor.go b/build/vendor.go
new file mode 100644
index 0000000000..8610af2681
--- /dev/null
+++ b/build/vendor.go
@@ -0,0 +1,18 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Use of this source code is governed by a MIT-style
+// license that can be found in the LICENSE file.
+
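+// Package build exists only to declare, via blank imports, the tool
+// dependencies used for linting (revive and its helpers) and embedding
+// (vfsgen), so that "go mod vendor" keeps them in the vendor directory.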
+package build
+
+import (
+ // for lint
+ _ "github.com/BurntSushi/toml"
+ _ "github.com/mgechev/dots"
+ _ "github.com/mgechev/revive/formatter"
+ _ "github.com/mgechev/revive/lint"
+ _ "github.com/mgechev/revive/rule"
+ _ "github.com/mitchellh/go-homedir"
+
+ // for embed
+ _ "github.com/shurcooL/vfsgen"
+)
diff --git a/go.mod b/go.mod
index 54abb2292f..2258055b6e 100644
--- a/go.mod
+++ b/go.mod
@@ -16,6 +16,7 @@ require (
gitea.com/macaron/macaron v1.4.0
gitea.com/macaron/session v0.0.0-20191207215012-613cebf0674d
gitea.com/macaron/toolbox v0.0.0-20190822013122-05ff0fc766b7
+ github.com/BurntSushi/toml v0.3.1
github.com/PuerkitoBio/goquery v1.5.0
github.com/RoaringBitmap/roaring v0.4.21 // indirect
github.com/bgentry/speakeasy v0.1.0 // indirect
@@ -67,17 +68,20 @@ require (
github.com/lunny/dingtalk_webhook v0.0.0-20171025031554-e3534c89ef96
github.com/mailru/easyjson v0.7.0 // indirect
github.com/markbates/goth v1.61.2
- github.com/mattn/go-isatty v0.0.7
+ github.com/mattn/go-isatty v0.0.11
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d // indirect
github.com/mattn/go-sqlite3 v1.11.0
github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75
+ github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81
+ github.com/mgechev/revive v1.0.2
github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a
+ github.com/mitchellh/go-homedir v1.1.0
github.com/msteinert/pam v0.0.0-20151204160544-02ccfbfaf0cc
github.com/nfnt/resize v0.0.0-20160724205520-891127d8d1b5
github.com/niklasfasching/go-org v0.1.9
github.com/oliamb/cutter v0.2.2
github.com/olivere/elastic/v7 v7.0.9
- github.com/pkg/errors v0.8.1
+ github.com/pkg/errors v0.9.1
github.com/pquerna/otp v0.0.0-20160912161815-54653902c20e
github.com/prometheus/client_golang v1.1.0
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 // indirect
@@ -107,7 +111,6 @@ require (
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527
golang.org/x/text v0.3.2
- golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 // indirect
gopkg.in/alexcesaro/quotedprintable.v3 v3.0.0-20150716171945-2caba252f4dc // indirect
gopkg.in/asn1-ber.v1 v1.0.0-20150924051756-4e86f4367175 // indirect
gopkg.in/gomail.v2 v2.0.0-20160411212932-81ebce5c23df
diff --git a/go.sum b/go.sum
index 552be8a7d0..968302b2bb 100644
--- a/go.sum
+++ b/go.sum
@@ -157,6 +157,10 @@ github.com/facebookgo/stack v0.0.0-20160209184415-751773369052 h1:JWuenKqqX8nojt
github.com/facebookgo/stack v0.0.0-20160209184415-751773369052/go.mod h1:UbMTZqLaRiH3MsBH8va0n7s1pQYcu3uTb8G4tygF4Zg=
github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870 h1:E2s37DuLxFhQDg5gKsWoLBOB0n+ZW8s599zru8FJ2/Y=
github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870/go.mod h1:5tD+neXqOorC30/tWg0LCSkrqj/AR6gu8yY8/fpw1q0=
+github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
+github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
+github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
+github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 h1:BHsljHzVlRcyQhjrss6TZTdY2VfCqZPbv5k3iBFa2ZQ=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/fortytw2/leaktest v1.3.0 h1:u8491cBMTQ8ft8aeV+adlcytMZylmA5nnwwkRZjI8vw=
@@ -184,6 +188,7 @@ github.com/go-git/go-git/v5 v5.0.0/go.mod h1:oYD8y9kWsGINPFJoLdaScGCN6dlKg23blmC
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
+github.com/go-logr/logr v0.1.0/go.mod h1:ixOQHD9gLJUVQQ2ZOR7zLEifBX6tGkNJF4QyIY7sIas=
github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI=
github.com/go-openapi/analysis v0.17.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
@@ -399,10 +404,15 @@ github.com/mailru/easyjson v0.7.0/go.mod h1:KAzv3t3aY1NaHWoQz1+4F1ccyAH66Jk7yos7
github.com/markbates/going v1.0.0/go.mod h1:I6mnB4BPnEeqo85ynXIx1ZFLLbtiLHNXVgWeFO9OGOA=
github.com/markbates/goth v1.61.2 h1:jDowrUH5qw8KGuQdKwFhLzkXkTYCIPfz3LHADJsiPIs=
github.com/markbates/goth v1.61.2/go.mod h1:qh2QfwZoWRucQ+DR5KVKC6dUGkNCToWh4vS45GIzFsY=
-github.com/mattn/go-isatty v0.0.7 h1:UvyT9uN+3r7yLEYSlJsbQGdsaB/a0DlgWP3pql6iwOc=
-github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
+github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
+github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d h1:m+dSK37rFf2fqppZhg15yI2IwC9BtucBiRwSDm9VL8g=
github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d/go.mod h1:/M9VLO+lUPmxvoOK2PfWRZ8mTtB4q1Hy9lEGijv9Nr8=
+github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
+github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-sqlite3 v1.10.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.11.0 h1:LDdKkqtYlom37fkvqs8rMPFKAMe8+SgjbwZ6ex1/A/Q=
github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
@@ -410,6 +420,10 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0j
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75 h1:Pijfgr7ZuvX7QIQiEwLdRVr3RoMG+i0SbBO1Qu+7yVk=
github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75/go.mod h1:76rfSfYPWj01Z85hUf/ituArm797mNKcvINh1OlsZKo=
+github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81 h1:QASJXOGm2RZ5Ardbc86qNFvby9AqkLDibfChMtAg5QM=
+github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81/go.mod h1:KQ7+USdGKfpPjXk4Ga+5XxQM4Lm4e3gAogrreFAYpOg=
+github.com/mgechev/revive v1.0.2 h1:v0NxxQ7fSFz/u1NQydPo6EGdq7va0J1BtsZmae6kzUg=
+github.com/mgechev/revive v1.0.2/go.mod h1:rb0dQy1LVAxW9SWy5R3LPUjevzUbUS316U5MFySA2lo=
github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a h1:d18LCO3ctH2kugUqt0pEyKKP8L+IYrocaPqGFilhTKk=
github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a/go.mod h1:hsXNsILzKxV+sX77C5b8FSuKF00vh2OMYv+xgHpAMF4=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
@@ -434,6 +448,8 @@ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLA
github.com/niklasfasching/go-org v0.1.9 h1:Toz8WMIt+qJb52uYEk1YD/muLuOOmRt1CfkV+bKVMkI=
github.com/niklasfasching/go-org v0.1.9/go.mod h1:AsLD6X7djzRIz4/RFZu8vwRL0VGjUvGZCCH1Nz0VdrU=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
+github.com/olekukonko/tablewriter v0.0.4 h1:vHD/YYe1Wolo78koG299f7V/VAS08c6IpCLn+Ejf/w8=
+github.com/olekukonko/tablewriter v0.0.4/go.mod h1:zq6QwlOf5SlnkVbMSr5EoBv3636FWnp+qbPhuoO21uA=
github.com/oliamb/cutter v0.2.2 h1:Lfwkya0HHNU1YLnGv2hTkzHfasrSMkgv4Dn+5rmlk3k=
github.com/oliamb/cutter v0.2.2/go.mod h1:4BenG2/4GuRBDbVm/OPahDVqbrOemzpPiG5mi1iryBU=
github.com/olivere/elastic/v7 v7.0.9 h1:+bTR1xJbfLYD8WnTBt9672mFlKxjfWRJpEQ1y8BMS3g=
@@ -457,6 +473,8 @@ github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
+github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
+github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/pquerna/cachecontrol v0.0.0-20180517163645-1555304b9b35/go.mod h1:prYjPmNq4d1NPVmpShWobRqXY3q7Vp+80DqgxxUrUIA=
@@ -631,6 +649,7 @@ golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTk
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
+golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee h1:WG0RUwxtNT4qqaXX3DPA8zHFNm/D9xaBpxzHt1WcA/E=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/net v0.0.0-20180218175443-cbe0f9307d01/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -690,6 +709,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190907184412-d223b2b6db03/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191010194322-b09406accb47 h1:/XfQ9z7ib8eEJX2hdgFTZJ/ntt0swNk5oYBziWeTCvY=
golang.org/x/sys v0.0.0-20191010194322-b09406accb47/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527 h1:uYVVQ9WP/Ds2ROhcaGPeIdVq0RIXVLwsHlnvJ+cT1So=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -714,9 +734,10 @@ golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgw
golang.org/x/tools v0.0.0-20190617190820-da514acc4774/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
-golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935 h1:kJQZhwFzSwJS2BxboKjdZzWczQOZx8VuH7Y8hhuGUtM=
-golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
+golang.org/x/tools v0.0.0-20200225230052-807dcd883420 h1:4RJNOV+2rLxMEfr6QIpC7GEv9MjD6ApGXTCLrNF9+eA=
+golang.org/x/tools v0.0.0-20200225230052-807dcd883420/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
+golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898 h1:/atklqdjdhuosWIl6AIbOeHJjicWYPqR9bpxqxYG2pA=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/api v0.3.1/go.mod h1:6wY9I6uQWHQ8EM57III9mq/AjF+i8G65rmVagqKMtkk=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
@@ -788,6 +809,7 @@ honnef.co/go/tools v0.0.0-20180728063816-88497007e858/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
+k8s.io/klog v1.0.0/go.mod h1:4Bi6QPql/J/LkTDqv7R/cd3hPo4k2DG6Ptcz060Ez5I=
mvdan.cc/xurls/v2 v2.1.0 h1:KaMb5GLhlcSX+e+qhbRJODnUUBvlw01jt4yrjFIHAuA=
mvdan.cc/xurls/v2 v2.1.0/go.mod h1:5GrSd9rOnKOpZaji1OZLYL/yeAAtGDlo/cFe+8K5n8E=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
diff --git a/main.go b/main.go
index bf7c59c252..ecf161bf10 100644
--- a/main.go
+++ b/main.go
@@ -21,8 +21,8 @@ import (
_ "code.gitea.io/gitea/modules/markup/markdown"
_ "code.gitea.io/gitea/modules/markup/orgmode"
- // for embed
- _ "github.com/shurcooL/vfsgen"
+ // for build
+ _ "code.gitea.io/gitea/build"
"github.com/urfave/cli"
)
diff --git a/modules/options/options_bindata.go b/modules/options/options_bindata.go
index a5143c1fff..262bd0de3e 100644
--- a/modules/options/options_bindata.go
+++ b/modules/options/options_bindata.go
@@ -6,4 +6,4 @@
package options
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../options options bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../options options bindata.go
diff --git a/modules/public/public_bindata.go b/modules/public/public_bindata.go
index 68a786c767..05648aea80 100644
--- a/modules/public/public_bindata.go
+++ b/modules/public/public_bindata.go
@@ -6,4 +6,4 @@
package public
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../public public bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../public public bindata.go
diff --git a/modules/templates/templates_bindata.go b/modules/templates/templates_bindata.go
index eaf64d9457..5a59286c7a 100644
--- a/modules/templates/templates_bindata.go
+++ b/modules/templates/templates_bindata.go
@@ -6,4 +6,4 @@
package templates
-//go:generate go run -mod=vendor ../../scripts/generate-bindata.go ../../templates templates bindata.go
+//go:generate go run -mod=vendor ../../build/generate-bindata.go ../../templates templates bindata.go
diff --git a/vendor/github.com/fatih/color/LICENSE.md b/vendor/github.com/fatih/color/LICENSE.md
new file mode 100644
index 0000000000..25fdaf639d
--- /dev/null
+++ b/vendor/github.com/fatih/color/LICENSE.md
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+
+Copyright (c) 2013 Fatih Arslan
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+the Software, and to permit persons to whom the Software is furnished to do so,
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/vendor/github.com/fatih/color/README.md b/vendor/github.com/fatih/color/README.md
new file mode 100644
index 0000000000..42d9abc07e
--- /dev/null
+++ b/vendor/github.com/fatih/color/README.md
@@ -0,0 +1,182 @@
+# Archived project. No maintenance.
+
+This project is not maintained anymore and is archived. Feel free to fork and
+make your own changes if needed. For more detail read my blog post: [Taking an indefinite sabbatical from my projects](https://arslan.io/2018/10/09/taking-an-indefinite-sabbatical-from-my-projects/)
+
+Thanks to everyone for their valuable feedback and contributions.
+
+
+# Color [![GoDoc](https://godoc.org/github.com/fatih/color?status.svg)](https://godoc.org/github.com/fatih/color)
+
+Color lets you use colorized outputs in terms of [ANSI Escape
+Codes](http://en.wikipedia.org/wiki/ANSI_escape_code#Colors) in Go (Golang). It
+has support for Windows too! The API can be used in several ways, pick one that
+suits you.
+
+
+![Color](https://i.imgur.com/c1JI0lA.png)
+
+
+## Install
+
+```bash
+go get github.com/fatih/color
+```
+
+## Examples
+
+### Standard colors
+
+```go
+// Print with default helper functions
+color.Cyan("Prints text in cyan.")
+
+// A newline will be appended automatically
+color.Blue("Prints %s in blue.", "text")
+
+// These are using the default foreground colors
+color.Red("We have red")
+color.Magenta("And many others ..")
+
+```
+
+### Mix and reuse colors
+
+```go
+// Create a new color object
+c := color.New(color.FgCyan).Add(color.Underline)
+c.Println("Prints cyan text with an underline.")
+
+// Or just add them to New()
+d := color.New(color.FgCyan, color.Bold)
+d.Printf("This prints bold cyan %s\n", "too!.")
+
+// Mix up foreground and background colors, create new mixes!
+red := color.New(color.FgRed)
+
+boldRed := red.Add(color.Bold)
+boldRed.Println("This will print text in bold red.")
+
+whiteBackground := red.Add(color.BgWhite)
+whiteBackground.Println("Red text with white background.")
+```
+
+### Use your own output (io.Writer)
+
+```go
+// Use your own io.Writer output
+color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
+
+blue := color.New(color.FgBlue)
+blue.Fprint(writer, "This will print text in blue.")
+```
+
+### Custom print functions (PrintFunc)
+
+```go
+// Create a custom print function for convenience
+red := color.New(color.FgRed).PrintfFunc()
+red("Warning")
+red("Error: %s", err)
+
+// Mix up multiple attributes
+notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
+notice("Don't forget this...")
+```
+
+### Custom fprint functions (FprintFunc)
+
+```go
+blue := color.New(color.FgBlue).FprintfFunc()
+blue(myWriter, "important notice: %s", stars)
+
+// Mix up with multiple attributes
+success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
+success(myWriter, "Don't forget this...")
+```
+
+### Insert into noncolor strings (SprintFunc)
+
+```go
+// Create SprintXxx functions to mix strings with other non-colorized strings:
+yellow := color.New(color.FgYellow).SprintFunc()
+red := color.New(color.FgRed).SprintFunc()
+fmt.Printf("This is a %s and this is %s.\n", yellow("warning"), red("error"))
+
+info := color.New(color.FgWhite, color.BgGreen).SprintFunc()
+fmt.Printf("This %s rocks!\n", info("package"))
+
+// Use helper functions
+fmt.Println("This", color.RedString("warning"), "should be not neglected.")
+fmt.Printf("%v %v\n", color.GreenString("Info:"), "an important message.")
+
+// Windows supported too! Just don't forget to change the output to color.Output
+fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
+```
+
+### Plug into existing code
+
+```go
+// Use handy standard colors
+color.Set(color.FgYellow)
+
+fmt.Println("Existing text will now be in yellow")
+fmt.Printf("This one %s\n", "too")
+
+color.Unset() // Don't forget to unset
+
+// You can mix up parameters
+color.Set(color.FgMagenta, color.Bold)
+defer color.Unset() // Use it in your function
+
+fmt.Println("All text will now be bold magenta.")
+```
+
+### Disable/Enable color
+
+There might be a case where you want to explicitly disable/enable color output. The
+`go-isatty` package will automatically disable color output for non-tty output streams
+(for example, if the output were piped directly to `less`).
+
+`Color` has support for disabling/enabling colors both globally and for single color
+definitions. For example, suppose you have a CLI app and a `--no-color` bool flag. You
+can easily disable the color output with:
+
+```go
+
+var flagNoColor = flag.Bool("no-color", false, "Disable color output")
+
+if *flagNoColor {
+ color.NoColor = true // disables colorized output
+}
+```
+
+It also has support for single color definitions (local). You can
+disable/enable color output on the fly:
+
+```go
+c := color.New(color.FgCyan)
+c.Println("Prints cyan text")
+
+c.DisableColor()
+c.Println("This is printed without any color")
+
+c.EnableColor()
+c.Println("This prints again cyan...")
+```
+
+## Todo
+
+* Save/Return previous values
+* Evaluate fmt.Formatter interface
+
+
+## Credits
+
+ * [Fatih Arslan](https://github.com/fatih)
+ * Windows support via @mattn: [colorable](https://github.com/mattn/go-colorable)
+
+## License
+
+The MIT License (MIT) - see [`LICENSE.md`](https://github.com/fatih/color/blob/master/LICENSE.md) for more details
+
diff --git a/vendor/github.com/fatih/color/color.go b/vendor/github.com/fatih/color/color.go
new file mode 100644
index 0000000000..91c8e9f062
--- /dev/null
+++ b/vendor/github.com/fatih/color/color.go
@@ -0,0 +1,603 @@
+package color
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+ "sync"
+
+ "github.com/mattn/go-colorable"
+ "github.com/mattn/go-isatty"
+)
+
+var (
+ // NoColor defines if the output is colorized or not. It's dynamically set to
+ // false or true based on whether stdout's file descriptor refers to a terminal.
+ // This is a global option and affects all colors. For more control over
+ // individual color blocks, use the DisableColor() method on each Color.
+ NoColor = os.Getenv("TERM") == "dumb" ||
+ (!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()))
+
+ // Output defines the standard output of the print functions. By default
+ // os.Stdout is used.
+ Output = colorable.NewColorableStdout()
+
+ // Error defines a color supporting writer for os.Stderr.
+ Error = colorable.NewColorableStderr()
+
+ // colorsCache is used to reduce the count of created Color objects and
+ // allows reusing already created objects with the required Attribute.
+ colorsCache = make(map[Attribute]*Color)
+ colorsCacheMu sync.Mutex // protects colorsCache
+)
+
+// Color defines a custom color object which is defined by SGR parameters.
+type Color struct {
+ params []Attribute
+ noColor *bool
+}
+
+// Attribute defines a single SGR Code
+type Attribute int
+
+const escape = "\x1b"
+
+// Base attributes
+const (
+ Reset Attribute = iota
+ Bold
+ Faint
+ Italic
+ Underline
+ BlinkSlow
+ BlinkRapid
+ ReverseVideo
+ Concealed
+ CrossedOut
+)
+
+// Foreground text colors
+const (
+ FgBlack Attribute = iota + 30
+ FgRed
+ FgGreen
+ FgYellow
+ FgBlue
+ FgMagenta
+ FgCyan
+ FgWhite
+)
+
+// Foreground Hi-Intensity text colors
+const (
+ FgHiBlack Attribute = iota + 90
+ FgHiRed
+ FgHiGreen
+ FgHiYellow
+ FgHiBlue
+ FgHiMagenta
+ FgHiCyan
+ FgHiWhite
+)
+
+// Background text colors
+const (
+ BgBlack Attribute = iota + 40
+ BgRed
+ BgGreen
+ BgYellow
+ BgBlue
+ BgMagenta
+ BgCyan
+ BgWhite
+)
+
+// Background Hi-Intensity text colors
+const (
+ BgHiBlack Attribute = iota + 100
+ BgHiRed
+ BgHiGreen
+ BgHiYellow
+ BgHiBlue
+ BgHiMagenta
+ BgHiCyan
+ BgHiWhite
+)
+
+// New returns a newly created color object.
+func New(value ...Attribute) *Color {
+ c := &Color{params: make([]Attribute, 0)}
+ c.Add(value...)
+ return c
+}
+
+// Set sets the given parameters immediately. It will change the color of
+// output with the given SGR parameters until color.Unset() is called.
+func Set(p ...Attribute) *Color {
+ c := New(p...)
+ c.Set()
+ return c
+}
+
+// Unset resets all escape attributes and clears the output. Usually should
+// be called after Set().
+func Unset() {
+ if NoColor {
+ return
+ }
+
+ fmt.Fprintf(Output, "%s[%dm", escape, Reset)
+}
+
+// Set sets the SGR sequence.
+func (c *Color) Set() *Color {
+ if c.isNoColorSet() {
+ return c
+ }
+
+ fmt.Fprintf(Output, c.format())
+ return c
+}
+
+func (c *Color) unset() {
+ if c.isNoColorSet() {
+ return
+ }
+
+ Unset()
+}
+
+func (c *Color) setWriter(w io.Writer) *Color {
+ if c.isNoColorSet() {
+ return c
+ }
+
+ fmt.Fprintf(w, c.format())
+ return c
+}
+
+func (c *Color) unsetWriter(w io.Writer) {
+ if c.isNoColorSet() {
+ return
+ }
+
+ if NoColor {
+ return
+ }
+
+ fmt.Fprintf(w, "%s[%dm", escape, Reset)
+}
+
+// Add is used to chain SGR parameters. Use as many parameters as needed to combine
+// and create custom color objects. Example: Add(color.FgRed, color.Underline).
+func (c *Color) Add(value ...Attribute) *Color {
+ c.params = append(c.params, value...)
+ return c
+}
+
+func (c *Color) prepend(value Attribute) {
+ c.params = append(c.params, 0)
+ copy(c.params[1:], c.params[0:])
+ c.params[0] = value
+}
+
+// Fprint formats using the default formats for its operands and writes to w.
+// Spaces are added between operands when neither is a string.
+// It returns the number of bytes written and any write error encountered.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprint(w, a...)
+}
+
+// Print formats using the default formats for its operands and writes to
+// standard output. Spaces are added between operands when neither is a
+// string. It returns the number of bytes written and any write error
+// encountered. This is the standard fmt.Print() method wrapped with the given
+// color.
+func (c *Color) Print(a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprint(Output, a...)
+}
+
+// Fprintf formats according to a format specifier and writes to w.
+// It returns the number of bytes written and any write error encountered.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprintf(w, format, a...)
+}
+
+// Printf formats according to a format specifier and writes to standard output.
+// It returns the number of bytes written and any write error encountered.
+// This is the standard fmt.Printf() method wrapped with the given color.
+func (c *Color) Printf(format string, a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprintf(Output, format, a...)
+}
+
+// Fprintln formats using the default formats for its operands and writes to w.
+// Spaces are always added between operands and a newline is appended.
+// On Windows, users should wrap w with colorable.NewColorable() if w is of
+// type *os.File.
+func (c *Color) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
+ c.setWriter(w)
+ defer c.unsetWriter(w)
+
+ return fmt.Fprintln(w, a...)
+}
+
+// Println formats using the default formats for its operands and writes to
+// standard output. Spaces are always added between operands and a newline is
+// appended. It returns the number of bytes written and any write error
+// encountered. This is the standard fmt.Print() method wrapped with the given
+// color.
+func (c *Color) Println(a ...interface{}) (n int, err error) {
+ c.Set()
+ defer c.unset()
+
+ return fmt.Fprintln(Output, a...)
+}
+
+// Sprint is just like Print, but returns a string instead of printing it.
+func (c *Color) Sprint(a ...interface{}) string {
+ return c.wrap(fmt.Sprint(a...))
+}
+
+// Sprintln is just like Println, but returns a string instead of printing it.
+func (c *Color) Sprintln(a ...interface{}) string {
+ return c.wrap(fmt.Sprintln(a...))
+}
+
+// Sprintf is just like Printf, but returns a string instead of printing it.
+func (c *Color) Sprintf(format string, a ...interface{}) string {
+ return c.wrap(fmt.Sprintf(format, a...))
+}
+
+// FprintFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprint().
+func (c *Color) FprintFunc() func(w io.Writer, a ...interface{}) {
+ return func(w io.Writer, a ...interface{}) {
+ c.Fprint(w, a...)
+ }
+}
+
+// PrintFunc returns a new function that prints the passed arguments as
+// colorized with color.Print().
+func (c *Color) PrintFunc() func(a ...interface{}) {
+ return func(a ...interface{}) {
+ c.Print(a...)
+ }
+}
+
+// FprintfFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprintf().
+func (c *Color) FprintfFunc() func(w io.Writer, format string, a ...interface{}) {
+ return func(w io.Writer, format string, a ...interface{}) {
+ c.Fprintf(w, format, a...)
+ }
+}
+
+// PrintfFunc returns a new function that prints the passed arguments as
+// colorized with color.Printf().
+func (c *Color) PrintfFunc() func(format string, a ...interface{}) {
+ return func(format string, a ...interface{}) {
+ c.Printf(format, a...)
+ }
+}
+
+// FprintlnFunc returns a new function that prints the passed arguments as
+// colorized with color.Fprintln().
+func (c *Color) FprintlnFunc() func(w io.Writer, a ...interface{}) {
+ return func(w io.Writer, a ...interface{}) {
+ c.Fprintln(w, a...)
+ }
+}
+
+// PrintlnFunc returns a new function that prints the passed arguments as
+// colorized with color.Println().
+func (c *Color) PrintlnFunc() func(a ...interface{}) {
+ return func(a ...interface{}) {
+ c.Println(a...)
+ }
+}
+
+// SprintFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprint(). Useful to put into or mix into other
+// string. Windows users should use this in conjunction with color.Output, example:
+//
+// put := New(FgYellow).SprintFunc()
+// fmt.Fprintf(color.Output, "This is a %s", put("warning"))
+func (c *Color) SprintFunc() func(a ...interface{}) string {
+ return func(a ...interface{}) string {
+ return c.wrap(fmt.Sprint(a...))
+ }
+}
+
+// SprintfFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprintf(). Useful to put into or mix into other
+// strings. Windows users should use this in conjunction with color.Output.
+func (c *Color) SprintfFunc() func(format string, a ...interface{}) string {
+ return func(format string, a ...interface{}) string {
+ return c.wrap(fmt.Sprintf(format, a...))
+ }
+}
+
+// SprintlnFunc returns a new function that returns colorized strings for the
+// given arguments with fmt.Sprintln(). Useful to put into or mix into other
+// string. Windows users should use this in conjunction with color.Output.
+func (c *Color) SprintlnFunc() func(a ...interface{}) string {
+ return func(a ...interface{}) string {
+ return c.wrap(fmt.Sprintln(a...))
+ }
+}
+
+// sequence returns a formatted SGR sequence to be plugged into a "\x1b[...m".
+// An example output might be: "1;36" -> bold cyan.
+func (c *Color) sequence() string {
+ format := make([]string, len(c.params))
+ for i, v := range c.params {
+ format[i] = strconv.Itoa(int(v))
+ }
+
+ return strings.Join(format, ";")
+}
+
+// wrap wraps the s string with the color's attributes. The string is ready to
+// be printed.
+func (c *Color) wrap(s string) string {
+ if c.isNoColorSet() {
+ return s
+ }
+
+ return c.format() + s + c.unformat()
+}
+
+func (c *Color) format() string {
+ return fmt.Sprintf("%s[%sm", escape, c.sequence())
+}
+
+func (c *Color) unformat() string {
+ return fmt.Sprintf("%s[%dm", escape, Reset)
+}
+
+// DisableColor disables the color output. Useful when you don't want to change
+// any existing code but still want to control whether output is colorized. Can be
+// used for flags like "--no-color". To enable it again, use the EnableColor() method.
+func (c *Color) DisableColor() {
+ c.noColor = boolPtr(true)
+}
+
+// EnableColor enables the color output. Use it in conjunction with
+// DisableColor(). Otherwise this method has no side effects.
+func (c *Color) EnableColor() {
+ c.noColor = boolPtr(false)
+}
+
+func (c *Color) isNoColorSet() bool {
+ // check first whether the user has explicitly set the option
+ if c.noColor != nil {
+ return *c.noColor
+ }
+
+ // if not return the global option, which is disabled by default
+ return NoColor
+}
+
+// Equals returns a boolean value indicating whether two colors are equal.
+func (c *Color) Equals(c2 *Color) bool {
+ if len(c.params) != len(c2.params) {
+ return false
+ }
+
+ for _, attr := range c.params {
+ if !c2.attrExists(attr) {
+ return false
+ }
+ }
+
+ return true
+}
+
+func (c *Color) attrExists(a Attribute) bool {
+ for _, attr := range c.params {
+ if attr == a {
+ return true
+ }
+ }
+
+ return false
+}
+
+func boolPtr(v bool) *bool {
+ return &v
+}
+
+func getCachedColor(p Attribute) *Color {
+ colorsCacheMu.Lock()
+ defer colorsCacheMu.Unlock()
+
+ c, ok := colorsCache[p]
+ if !ok {
+ c = New(p)
+ colorsCache[p] = c
+ }
+
+ return c
+}
+
+func colorPrint(format string, p Attribute, a ...interface{}) {
+ c := getCachedColor(p)
+
+ if !strings.HasSuffix(format, "\n") {
+ format += "\n"
+ }
+
+ if len(a) == 0 {
+ c.Print(format)
+ } else {
+ c.Printf(format, a...)
+ }
+}
+
+func colorString(format string, p Attribute, a ...interface{}) string {
+ c := getCachedColor(p)
+
+ if len(a) == 0 {
+ return c.SprintFunc()(format)
+ }
+
+ return c.SprintfFunc()(format, a...)
+}
+
+// Black is a convenient helper function to print with black foreground. A
+// newline is appended to format by default.
+func Black(format string, a ...interface{}) { colorPrint(format, FgBlack, a...) }
+
+// Red is a convenient helper function to print with red foreground. A
+// newline is appended to format by default.
+func Red(format string, a ...interface{}) { colorPrint(format, FgRed, a...) }
+
+// Green is a convenient helper function to print with green foreground. A
+// newline is appended to format by default.
+func Green(format string, a ...interface{}) { colorPrint(format, FgGreen, a...) }
+
+// Yellow is a convenient helper function to print with yellow foreground.
+// A newline is appended to format by default.
+func Yellow(format string, a ...interface{}) { colorPrint(format, FgYellow, a...) }
+
+// Blue is a convenient helper function to print with blue foreground. A
+// newline is appended to format by default.
+func Blue(format string, a ...interface{}) { colorPrint(format, FgBlue, a...) }
+
+// Magenta is a convenient helper function to print with magenta foreground.
+// A newline is appended to format by default.
+func Magenta(format string, a ...interface{}) { colorPrint(format, FgMagenta, a...) }
+
+// Cyan is a convenient helper function to print with cyan foreground. A
+// newline is appended to format by default.
+func Cyan(format string, a ...interface{}) { colorPrint(format, FgCyan, a...) }
+
+// White is a convenient helper function to print with white foreground. A
+// newline is appended to format by default.
+func White(format string, a ...interface{}) { colorPrint(format, FgWhite, a...) }
+
+// BlackString is a convenient helper function to return a string with black
+// foreground.
+func BlackString(format string, a ...interface{}) string { return colorString(format, FgBlack, a...) }
+
+// RedString is a convenient helper function to return a string with red
+// foreground.
+func RedString(format string, a ...interface{}) string { return colorString(format, FgRed, a...) }
+
+// GreenString is a convenient helper function to return a string with green
+// foreground.
+func GreenString(format string, a ...interface{}) string { return colorString(format, FgGreen, a...) }
+
+// YellowString is a convenient helper function to return a string with yellow
+// foreground.
+func YellowString(format string, a ...interface{}) string { return colorString(format, FgYellow, a...) }
+
+// BlueString is a convenient helper function to return a string with blue
+// foreground.
+func BlueString(format string, a ...interface{}) string { return colorString(format, FgBlue, a...) }
+
+// MagentaString is a convenient helper function to return a string with magenta
+// foreground.
+func MagentaString(format string, a ...interface{}) string {
+ return colorString(format, FgMagenta, a...)
+}
+
+// CyanString is a convenient helper function to return a string with cyan
+// foreground.
+func CyanString(format string, a ...interface{}) string { return colorString(format, FgCyan, a...) }
+
+// WhiteString is a convenient helper function to return a string with white
+// foreground.
+func WhiteString(format string, a ...interface{}) string { return colorString(format, FgWhite, a...) }
+
+// HiBlack is a convenient helper function to print with hi-intensity black foreground. A
+// newline is appended to format by default.
+func HiBlack(format string, a ...interface{}) { colorPrint(format, FgHiBlack, a...) }
+
+// HiRed is a convenient helper function to print with hi-intensity red foreground. A
+// newline is appended to format by default.
+func HiRed(format string, a ...interface{}) { colorPrint(format, FgHiRed, a...) }
+
+// HiGreen is a convenient helper function to print with hi-intensity green foreground. A
+// newline is appended to format by default.
+func HiGreen(format string, a ...interface{}) { colorPrint(format, FgHiGreen, a...) }
+
+// HiYellow is a convenient helper function to print with hi-intensity yellow foreground.
+// A newline is appended to format by default.
+func HiYellow(format string, a ...interface{}) { colorPrint(format, FgHiYellow, a...) }
+
+// HiBlue is a convenient helper function to print with hi-intensity blue foreground. A
+// newline is appended to format by default.
+func HiBlue(format string, a ...interface{}) { colorPrint(format, FgHiBlue, a...) }
+
+// HiMagenta is a convenient helper function to print with hi-intensity magenta foreground.
+// A newline is appended to format by default.
+func HiMagenta(format string, a ...interface{}) { colorPrint(format, FgHiMagenta, a...) }
+
+// HiCyan is a convenient helper function to print with hi-intensity cyan foreground. A
+// newline is appended to format by default.
+func HiCyan(format string, a ...interface{}) { colorPrint(format, FgHiCyan, a...) }
+
+// HiWhite is a convenient helper function to print with hi-intensity white foreground. A
+// newline is appended to format by default.
+func HiWhite(format string, a ...interface{}) { colorPrint(format, FgHiWhite, a...) }
+
+// HiBlackString is a convenient helper function to return a string with hi-intensity black
+// foreground.
+func HiBlackString(format string, a ...interface{}) string {
+ return colorString(format, FgHiBlack, a...)
+}
+
+// HiRedString is a convenient helper function to return a string with hi-intensity red
+// foreground.
+func HiRedString(format string, a ...interface{}) string { return colorString(format, FgHiRed, a...) }
+
+// HiGreenString is a convenient helper function to return a string with hi-intensity green
+// foreground.
+func HiGreenString(format string, a ...interface{}) string {
+ return colorString(format, FgHiGreen, a...)
+}
+
+// HiYellowString is a convenient helper function to return a string with hi-intensity yellow
+// foreground.
+func HiYellowString(format string, a ...interface{}) string {
+ return colorString(format, FgHiYellow, a...)
+}
+
+// HiBlueString is a convenient helper function to return a string with hi-intensity blue
+// foreground.
+func HiBlueString(format string, a ...interface{}) string { return colorString(format, FgHiBlue, a...) }
+
+// HiMagentaString is a convenient helper function to return a string with hi-intensity magenta
+// foreground.
+func HiMagentaString(format string, a ...interface{}) string {
+ return colorString(format, FgHiMagenta, a...)
+}
+
+// HiCyanString is a convenient helper function to return a string with hi-intensity cyan
+// foreground.
+func HiCyanString(format string, a ...interface{}) string { return colorString(format, FgHiCyan, a...) }
+
+// HiWhiteString is a convenient helper function to return a string with hi-intensity white
+// foreground.
+func HiWhiteString(format string, a ...interface{}) string {
+ return colorString(format, FgHiWhite, a...)
+}
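For reviewers, a minimal usage sketch of the helpers vendored above (illustrative only, not part of the patch; the messages and attribute choices are made up):

```go
package main

import (
	"fmt"

	"github.com/fatih/color"
)

func main() {
	// Package-level helpers defined above; a newline is appended automatically.
	color.Red("build failed: %d errors", 3)
	color.HiGreen("all tests passed")

	// The *String variants return colorized strings instead of printing them.
	fmt.Println(color.YellowString("warning: %s", "deprecated flag"))

	// Equals compares two colors by their attribute sets, ignoring order.
	a := color.New(color.FgCyan, color.Bold)
	b := color.New(color.Bold, color.FgCyan)
	fmt.Println(a.Equals(b)) // true
}
```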
diff --git a/vendor/github.com/fatih/color/doc.go b/vendor/github.com/fatih/color/doc.go
new file mode 100644
index 0000000000..cf1e96500f
--- /dev/null
+++ b/vendor/github.com/fatih/color/doc.go
@@ -0,0 +1,133 @@
+/*
+Package color is an ANSI color package to output colorized or SGR defined
+output to the standard output. The API can be used in several ways; pick one
+that suits you.
+
+Use simple and default helper functions with predefined foreground colors:
+
+ color.Cyan("Prints text in cyan.")
+
+ // a newline will be appended automatically
+ color.Blue("Prints %s in blue.", "text")
+
+ // More default foreground colors..
+ color.Red("We have red")
+ color.Yellow("Yellow color too!")
+ color.Magenta("And many others ..")
+
+ // Hi-intensity colors
+ color.HiGreen("Bright green color.")
+ color.HiBlack("Bright black means gray..")
+ color.HiWhite("Shiny white color!")
+
+However, there are times when custom color mixes are required. Below are some
+examples to create custom color objects and use the print functions of each
+separate color object.
+
+ // Create a new color object
+ c := color.New(color.FgCyan).Add(color.Underline)
+ c.Println("Prints cyan text with an underline.")
+
+ // Or just add them to New()
+ d := color.New(color.FgCyan, color.Bold)
+ d.Printf("This prints bold cyan %s\n", "too!.")
+
+
+ // Mix up foreground and background colors, create new mixes!
+ red := color.New(color.FgRed)
+
+ boldRed := red.Add(color.Bold)
+ boldRed.Println("This will print text in bold red.")
+
+ whiteBackground := red.Add(color.BgWhite)
+ whiteBackground.Println("Red text with White background.")
+
+ // Use your own io.Writer output
+ color.New(color.FgBlue).Fprintln(myWriter, "blue color!")
+
+ blue := color.New(color.FgBlue)
+ blue.Fprint(myWriter, "This will print text in blue.")
+
+You can create PrintXxx functions to simplify even more:
+
+ // Create a custom print function for convenience
+ red := color.New(color.FgRed).PrintfFunc()
+ red("warning")
+ red("error: %s", err)
+
+ // Mix up multiple attributes
+ notice := color.New(color.Bold, color.FgGreen).PrintlnFunc()
+ notice("don't forget this...")
+
+You can also use the FprintXxx functions to pass your own io.Writer:
+
+ blue := color.New(FgBlue).FprintfFunc()
+ blue(myWriter, "important notice: %s", stars)
+
+ // Mix up with multiple attributes
+ success := color.New(color.Bold, color.FgGreen).FprintlnFunc()
+ success(myWriter, "don't forget this...")
+
+
+Or create SprintXxx functions to mix strings with other non-colorized strings:
+
+ yellow := New(FgYellow).SprintFunc()
+ red := New(FgRed).SprintFunc()
+
+ fmt.Printf("this is a %s and this is %s.\n", yellow("warning"), red("error"))
+
+ info := New(FgWhite, BgGreen).SprintFunc()
+ fmt.Printf("this %s rocks!\n", info("package"))
+
+Windows support is enabled by default. All Print functions work as intended.
+However, for the color.SprintXXX functions, the user should use fmt.FprintXXX and
+set the output to color.Output:
+
+ fmt.Fprintf(color.Output, "Windows support: %s", color.GreenString("PASS"))
+
+ info := New(FgWhite, BgGreen).SprintFunc()
+ fmt.Fprintf(color.Output, "this %s rocks!\n", info("package"))
+
+Using it with existing code is possible. Just use the Set() method to set the
+standard output to the given parameters. That way a rewrite of existing
+code is not required.
+
+ // Use handy standard colors.
+ color.Set(color.FgYellow)
+
+ fmt.Println("Existing text will be now in Yellow")
+ fmt.Printf("This one %s\n", "too")
+
+ color.Unset() // don't forget to unset
+
+ // You can mix up parameters
+ color.Set(color.FgMagenta, color.Bold)
+ defer color.Unset() // use it in your function
+
+ fmt.Println("All text will be now bold magenta.")
+
+There might be a case where you want to disable color output (for example to
+pipe the standard output of your app somewhere else). `Color` supports
+disabling colors both globally and for a single color definition. For example,
+suppose you have a CLI app and a `--no-color` bool flag. You can easily disable
+the color output with:
+
+ var flagNoColor = flag.Bool("no-color", false, "Disable color output")
+
+ if *flagNoColor {
+ color.NoColor = true // disables colorized output
+ }
+
+It also has support for single color definitions (local). You can
+disable/enable color output on the fly:
+
+ c := color.New(color.FgCyan)
+ c.Println("Prints cyan text")
+
+ c.DisableColor()
+ c.Println("This is printed without any color")
+
+ c.EnableColor()
+ c.Println("This prints again cyan...")
+*/
+package color
diff --git a/vendor/github.com/fatih/color/go.mod b/vendor/github.com/fatih/color/go.mod
new file mode 100644
index 0000000000..bc0df75458
--- /dev/null
+++ b/vendor/github.com/fatih/color/go.mod
@@ -0,0 +1,8 @@
+module github.com/fatih/color
+
+go 1.13
+
+require (
+ github.com/mattn/go-colorable v0.1.4
+ github.com/mattn/go-isatty v0.0.11
+)
diff --git a/vendor/github.com/fatih/color/go.sum b/vendor/github.com/fatih/color/go.sum
new file mode 100644
index 0000000000..44328a8db5
--- /dev/null
+++ b/vendor/github.com/fatih/color/go.sum
@@ -0,0 +1,8 @@
+github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
+github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
+github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
+github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
diff --git a/vendor/github.com/fatih/structtag/LICENSE b/vendor/github.com/fatih/structtag/LICENSE
new file mode 100644
index 0000000000..4fd15f9f8f
--- /dev/null
+++ b/vendor/github.com/fatih/structtag/LICENSE
@@ -0,0 +1,60 @@
+Copyright (c) 2017, Fatih Arslan
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+* Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+* Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+* Neither the name of structtag nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+This software includes some portions from Go. Go is used under the terms of the
+BSD like license.
+
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+The Go gopher was designed by Renee French. http://reneefrench.blogspot.com/ The design is licensed under the Creative Commons 3.0 Attributions license. Read this article for more details: https://blog.golang.org/gopher
diff --git a/vendor/github.com/fatih/structtag/README.md b/vendor/github.com/fatih/structtag/README.md
new file mode 100644
index 0000000000..c4e8b1e86e
--- /dev/null
+++ b/vendor/github.com/fatih/structtag/README.md
@@ -0,0 +1,73 @@
+# structtag [![GoDoc](http://img.shields.io/badge/go-documentation-blue.svg?style=flat-square)](http://godoc.org/github.com/fatih/structtag)
+
+structtag provides an easy way of parsing and manipulating struct tag fields.
+Please vendor the library as it might change in future versions.
+
+# Install
+
+```bash
+go get github.com/fatih/structtag
+```
+
+# Example
+
+```go
+package main
+
+import (
+ "fmt"
+ "reflect"
+ "sort"
+
+ "github.com/fatih/structtag"
+)
+
+func main() {
+ type t struct {
+ t string `json:"foo,omitempty,string" xml:"foo"`
+ }
+
+ // get field tag
+ tag := reflect.TypeOf(t{}).Field(0).Tag
+
+ // ... and start using structtag by parsing the tag
+ tags, err := structtag.Parse(string(tag))
+ if err != nil {
+ panic(err)
+ }
+
+ // iterate over all tags
+ for _, t := range tags.Tags() {
+ fmt.Printf("tag: %+v\n", t)
+ }
+
+ // get a single tag
+ jsonTag, err := tags.Get("json")
+ if err != nil {
+ panic(err)
+ }
+ fmt.Println(jsonTag) // Output: json:"foo,omitempty,string"
+ fmt.Println(jsonTag.Key) // Output: json
+ fmt.Println(jsonTag.Name) // Output: foo
+ fmt.Println(jsonTag.Options) // Output: [omitempty string]
+
+ // change existing tag
+ jsonTag.Name = "foo_bar"
+ jsonTag.Options = nil
+ tags.Set(jsonTag)
+
+ // add new tag
+ tags.Set(&structtag.Tag{
+ Key: "hcl",
+ Name: "foo",
+ Options: []string{"squash"},
+ })
+
+ // print the tags
+ fmt.Println(tags) // Output: json:"foo_bar" xml:"foo" hcl:"foo,squash"
+
+ // sort tags according to keys
+ sort.Sort(tags)
+ fmt.Println(tags) // Output: hcl:"foo,squash" json:"foo_bar" xml:"foo"
+}
+```
diff --git a/vendor/github.com/fatih/structtag/go.mod b/vendor/github.com/fatih/structtag/go.mod
new file mode 100644
index 0000000000..660d6a1f1c
--- /dev/null
+++ b/vendor/github.com/fatih/structtag/go.mod
@@ -0,0 +1,3 @@
+module github.com/fatih/structtag
+
+go 1.12
diff --git a/vendor/github.com/fatih/structtag/tags.go b/vendor/github.com/fatih/structtag/tags.go
new file mode 100644
index 0000000000..c168fb21c6
--- /dev/null
+++ b/vendor/github.com/fatih/structtag/tags.go
@@ -0,0 +1,315 @@
+package structtag
+
+import (
+ "bytes"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+var (
+ errTagSyntax = errors.New("bad syntax for struct tag pair")
+ errTagKeySyntax = errors.New("bad syntax for struct tag key")
+ errTagValueSyntax = errors.New("bad syntax for struct tag value")
+
+ errKeyNotSet = errors.New("tag key does not exist")
+ errTagNotExist = errors.New("tag does not exist")
+ errTagKeyMismatch = errors.New("mismatch between key and tag.key")
+)
+
+// Tags represent a set of tags from a single struct field
+type Tags struct {
+ tags []*Tag
+}
+
+// Tag defines a single struct's string literal tag
+type Tag struct {
+ // Key is the tag key, such as json, xml, etc.
+ // i.e. `json:"foo,omitempty"`. Here key is: "json"
+ Key string
+
+ // Name is a part of the value
+ // i.e. `json:"foo,omitempty"`. Here name is: "foo"
+ Name string
+
+ // Options is a part of the value. It contains a slice of tag options, i.e.
+ // `json:"foo,omitempty"`. Here options is: ["omitempty"]
+ Options []string
+}
+
+// Parse parses a single struct field tag and returns the set of tags.
+func Parse(tag string) (*Tags, error) {
+ var tags []*Tag
+
+ hasTag := tag != ""
+
+ // NOTE(arslan) following code is from reflect and vet package with some
+ // modifications to collect all necessary information and extend it with
+ // usable methods
+ for tag != "" {
+ // Skip leading space.
+ i := 0
+ for i < len(tag) && tag[i] == ' ' {
+ i++
+ }
+ tag = tag[i:]
+ if tag == "" {
+ break
+ }
+
+ // Scan to colon. A space, a quote or a control character is a syntax
+ // error. Strictly speaking, control chars include the range [0x7f,
+ // 0x9f], not just [0x00, 0x1f], but in practice, we ignore the
+ // multi-byte control characters as it is simpler to inspect the tag's
+ // bytes than the tag's runes.
+ i = 0
+ for i < len(tag) && tag[i] > ' ' && tag[i] != ':' && tag[i] != '"' && tag[i] != 0x7f {
+ i++
+ }
+
+ if i == 0 {
+ return nil, errTagKeySyntax
+ }
+ if i+1 >= len(tag) || tag[i] != ':' {
+ return nil, errTagSyntax
+ }
+ if tag[i+1] != '"' {
+ return nil, errTagValueSyntax
+ }
+
+ key := string(tag[:i])
+ tag = tag[i+1:]
+
+ // Scan quoted string to find value.
+ i = 1
+ for i < len(tag) && tag[i] != '"' {
+ if tag[i] == '\\' {
+ i++
+ }
+ i++
+ }
+ if i >= len(tag) {
+ return nil, errTagValueSyntax
+ }
+
+ qvalue := string(tag[:i+1])
+ tag = tag[i+1:]
+
+ value, err := strconv.Unquote(qvalue)
+ if err != nil {
+ return nil, errTagValueSyntax
+ }
+
+ res := strings.Split(value, ",")
+ name := res[0]
+ options := res[1:]
+ if len(options) == 0 {
+ options = nil
+ }
+
+ tags = append(tags, &Tag{
+ Key: key,
+ Name: name,
+ Options: options,
+ })
+ }
+
+ if hasTag && len(tags) == 0 {
+ return nil, nil
+ }
+
+ return &Tags{
+ tags: tags,
+ }, nil
+}
+
+// Get returns the tag associated with the given key. If the key is present
+// in the tag, the matching Tag (whose value may be empty) is returned with a
+// nil error. Otherwise the returned Tag is nil and the error is
+// errTagNotExist.
+func (t *Tags) Get(key string) (*Tag, error) {
+ for _, tag := range t.tags {
+ if tag.Key == key {
+ return tag, nil
+ }
+ }
+
+ return nil, errTagNotExist
+}
+
+// Set sets the given tag. If the tag key already exists, the existing tag is overridden.
+func (t *Tags) Set(tag *Tag) error {
+ if tag.Key == "" {
+ return errKeyNotSet
+ }
+
+ added := false
+ for i, tg := range t.tags {
+ if tg.Key == tag.Key {
+ added = true
+ t.tags[i] = tag
+ }
+ }
+
+ if !added {
+ // this means this is a new tag, add it
+ t.tags = append(t.tags, tag)
+ }
+
+ return nil
+}
+
+// AddOptions adds the given options for the given key. Options that already
+// exist are not added again.
+func (t *Tags) AddOptions(key string, options ...string) {
+ for i, tag := range t.tags {
+ if tag.Key != key {
+ continue
+ }
+
+ for _, opt := range options {
+ if !tag.HasOption(opt) {
+ tag.Options = append(tag.Options, opt)
+ }
+ }
+
+ t.tags[i] = tag
+ }
+}
+
+// DeleteOptions deletes the given options for the given key
+func (t *Tags) DeleteOptions(key string, options ...string) {
+ hasOption := func(option string) bool {
+ for _, opt := range options {
+ if opt == option {
+ return true
+ }
+ }
+ return false
+ }
+
+ for i, tag := range t.tags {
+ if tag.Key != key {
+ continue
+ }
+
+ var updated []string
+ for _, opt := range tag.Options {
+ if !hasOption(opt) {
+ updated = append(updated, opt)
+ }
+ }
+
+ tag.Options = updated
+ t.tags[i] = tag
+ }
+}
+
+// Delete deletes the tags for the given keys
+func (t *Tags) Delete(keys ...string) {
+ hasKey := func(key string) bool {
+ for _, k := range keys {
+ if k == key {
+ return true
+ }
+ }
+ return false
+ }
+
+ var updated []*Tag
+ for _, tag := range t.tags {
+ if !hasKey(tag.Key) {
+ updated = append(updated, tag)
+ }
+ }
+
+ t.tags = updated
+}
+
+// Tags returns a slice of tags. The order is the original tag order unless it
+// was changed.
+func (t *Tags) Tags() []*Tag {
+ return t.tags
+}
+
+// Keys returns a slice of tag keys. The order is the original tag order
+// unless it was changed.
+func (t *Tags) Keys() []string {
+ var keys []string
+ for _, tag := range t.tags {
+ keys = append(keys, tag.Key)
+ }
+ return keys
+}
+
+// String reassembles the tags into a valid literal tag field representation
+func (t *Tags) String() string {
+ tags := t.Tags()
+ if len(tags) == 0 {
+ return ""
+ }
+
+ var buf bytes.Buffer
+ for i, tag := range t.Tags() {
+ buf.WriteString(tag.String())
+ if i != len(tags)-1 {
+ buf.WriteString(" ")
+ }
+ }
+ return buf.String()
+}
+
+// HasOption returns true if the given option is available in options
+func (t *Tag) HasOption(opt string) bool {
+ for _, tagOpt := range t.Options {
+ if tagOpt == opt {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Value returns the raw value of the tag, i.e. if the tag is
+// `json:"foo,omitempty"`, the Value is "foo,omitempty"
+func (t *Tag) Value() string {
+ options := strings.Join(t.Options, ",")
+ if options != "" {
+ return fmt.Sprintf(`%s,%s`, t.Name, options)
+ }
+ return t.Name
+}
+
+// String reassembles the tag into a valid tag field representation
+func (t *Tag) String() string {
+ return fmt.Sprintf(`%s:%q`, t.Key, t.Value())
+}
+
+// GoString implements the fmt.GoStringer interface
+func (t *Tag) GoString() string {
+ template := `{
+ Key: '%s',
+ Name: '%s',
+ Option: '%s',
+ }`
+
+ if t.Options == nil {
+ return fmt.Sprintf(template, t.Key, t.Name, "nil")
+ }
+
+ options := strings.Join(t.Options, ",")
+ return fmt.Sprintf(template, t.Key, t.Name, options)
+}
+
+func (t *Tags) Len() int {
+ return len(t.tags)
+}
+
+func (t *Tags) Less(i int, j int) bool {
+ return t.tags[i].Key < t.tags[j].Key
+}
+
+func (t *Tags) Swap(i int, j int) {
+ t.tags[i], t.tags[j] = t.tags[j], t.tags[i]
+}
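As a quick reference for the methods defined in tags.go above (AddOptions, DeleteOptions, Delete, Keys), here is a small hedged sketch; the struct tag literal and option names are invented for illustration and are not part of the vendored file:

```go
package main

import (
	"fmt"

	"github.com/fatih/structtag"
)

func main() {
	tags, err := structtag.Parse(`json:"name,omitempty" xml:"name"`)
	if err != nil {
		panic(err)
	}

	// AddOptions only appends options that are not already present.
	tags.AddOptions("json", "omitempty", "string")

	// DeleteOptions removes the given options for a key.
	tags.DeleteOptions("json", "omitempty")

	// Delete drops whole tags by key.
	tags.Delete("xml")

	fmt.Println(tags)        // json:"name,string"
	fmt.Println(tags.Keys()) // [json]
}
```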
diff --git a/vendor/github.com/mattn/go-colorable/.travis.yml b/vendor/github.com/mattn/go-colorable/.travis.yml
new file mode 100644
index 0000000000..98db8f060b
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/.travis.yml
@@ -0,0 +1,9 @@
+language: go
+go:
+ - tip
+
+before_install:
+ - go get github.com/mattn/goveralls
+ - go get golang.org/x/tools/cmd/cover
+script:
+ - $HOME/gopath/bin/goveralls -repotoken xnXqRGwgW3SXIguzxf90ZSK1GPYZPaGrw
diff --git a/vendor/github.com/mattn/go-colorable/LICENSE b/vendor/github.com/mattn/go-colorable/LICENSE
new file mode 100644
index 0000000000..91b5cef30e
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Yasuhiro Matsumoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mattn/go-colorable/README.md b/vendor/github.com/mattn/go-colorable/README.md
new file mode 100644
index 0000000000..56729a92ca
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/README.md
@@ -0,0 +1,48 @@
+# go-colorable
+
+[![Godoc Reference](https://godoc.org/github.com/mattn/go-colorable?status.svg)](http://godoc.org/github.com/mattn/go-colorable)
+[![Build Status](https://travis-ci.org/mattn/go-colorable.svg?branch=master)](https://travis-ci.org/mattn/go-colorable)
+[![Coverage Status](https://coveralls.io/repos/github/mattn/go-colorable/badge.svg?branch=master)](https://coveralls.io/github/mattn/go-colorable?branch=master)
+[![Go Report Card](https://goreportcard.com/badge/mattn/go-colorable)](https://goreportcard.com/report/mattn/go-colorable)
+
+Colorable writer for Windows.
+
+For example, most logger packages don't show colors on Windows. (I know we can do it with ansicon, but I don't want to.)
+This package makes it possible to handle ANSI color escape sequences on Windows.
+
+## Too Bad!
+
+![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/bad.png)
+
+
+## So Good!
+
+![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/good.png)
+
+## Usage
+
+```go
+logrus.SetFormatter(&logrus.TextFormatter{ForceColors: true})
+logrus.SetOutput(colorable.NewColorableStdout())
+
+logrus.Info("succeeded")
+logrus.Warn("not correct")
+logrus.Error("something error")
+logrus.Fatal("panic")
+```
+
+You can compile the above code on non-Windows OSs.
+
+## Installation
+
+```
+$ go get github.com/mattn/go-colorable
+```
+
+# License
+
+MIT
+
+# Author
+
+Yasuhiro Matsumoto (a.k.a mattn)
diff --git a/vendor/github.com/mattn/go-colorable/colorable_appengine.go b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
new file mode 100644
index 0000000000..0b0aef8370
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_appengine.go
@@ -0,0 +1,29 @@
+// +build appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable returns new instance of Writer which handles escape sequence.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/mattn/go-colorable/colorable_others.go
new file mode 100644
index 0000000000..3fb771dcca
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_others.go
@@ -0,0 +1,30 @@
+// +build !windows
+// +build !appengine
+
+package colorable
+
+import (
+ "io"
+ "os"
+
+ _ "github.com/mattn/go-isatty"
+)
+
+// NewColorable returns new instance of Writer which handles escape sequence.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ return file
+}
+
+// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return os.Stdout
+}
+
+// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return os.Stderr
+}
diff --git a/vendor/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/mattn/go-colorable/colorable_windows.go
new file mode 100644
index 0000000000..1bd628f25c
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/colorable_windows.go
@@ -0,0 +1,1005 @@
+// +build windows
+// +build !appengine
+
+package colorable
+
+import (
+ "bytes"
+ "io"
+ "math"
+ "os"
+ "strconv"
+ "strings"
+ "syscall"
+ "unsafe"
+
+ "github.com/mattn/go-isatty"
+)
+
+const (
+ foregroundBlue = 0x1
+ foregroundGreen = 0x2
+ foregroundRed = 0x4
+ foregroundIntensity = 0x8
+ foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity)
+ backgroundBlue = 0x10
+ backgroundGreen = 0x20
+ backgroundRed = 0x40
+ backgroundIntensity = 0x80
+ backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity)
+)
+
+const (
+ genericRead = 0x80000000
+ genericWrite = 0x40000000
+)
+
+const (
+ consoleTextmodeBuffer = 0x1
+)
+
+type wchar uint16
+type short int16
+type dword uint32
+type word uint16
+
+type coord struct {
+ x short
+ y short
+}
+
+type smallRect struct {
+ left short
+ top short
+ right short
+ bottom short
+}
+
+type consoleScreenBufferInfo struct {
+ size coord
+ cursorPosition coord
+ attributes word
+ window smallRect
+ maximumWindowSize coord
+}
+
+type consoleCursorInfo struct {
+ size dword
+ visible int32
+}
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")
+ procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
+ procSetConsoleCursorPosition = kernel32.NewProc("SetConsoleCursorPosition")
+ procFillConsoleOutputCharacter = kernel32.NewProc("FillConsoleOutputCharacterW")
+ procFillConsoleOutputAttribute = kernel32.NewProc("FillConsoleOutputAttribute")
+ procGetConsoleCursorInfo = kernel32.NewProc("GetConsoleCursorInfo")
+ procSetConsoleCursorInfo = kernel32.NewProc("SetConsoleCursorInfo")
+ procSetConsoleTitle = kernel32.NewProc("SetConsoleTitleW")
+ procCreateConsoleScreenBuffer = kernel32.NewProc("CreateConsoleScreenBuffer")
+)
+
+// Writer provides colorable Writer to the console
+type Writer struct {
+ out io.Writer
+ handle syscall.Handle
+ althandle syscall.Handle
+ oldattr word
+ oldpos coord
+ rest bytes.Buffer
+}
+
+// NewColorable returns new instance of Writer which handles escape sequence from File.
+func NewColorable(file *os.File) io.Writer {
+ if file == nil {
+ panic("nil passed instead of *os.File to NewColorable()")
+ }
+
+ if isatty.IsTerminal(file.Fd()) {
+ var csbi consoleScreenBufferInfo
+ handle := syscall.Handle(file.Fd())
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ return &Writer{out: file, handle: handle, oldattr: csbi.attributes, oldpos: coord{0, 0}}
+ }
+ return file
+}
+
+// NewColorableStdout returns new instance of Writer which handles escape sequence for stdout.
+func NewColorableStdout() io.Writer {
+ return NewColorable(os.Stdout)
+}
+
+// NewColorableStderr returns new instance of Writer which handles escape sequence for stderr.
+func NewColorableStderr() io.Writer {
+ return NewColorable(os.Stderr)
+}
+
+var color256 = map[int]int{
+ 0: 0x000000,
+ 1: 0x800000,
+ 2: 0x008000,
+ 3: 0x808000,
+ 4: 0x000080,
+ 5: 0x800080,
+ 6: 0x008080,
+ 7: 0xc0c0c0,
+ 8: 0x808080,
+ 9: 0xff0000,
+ 10: 0x00ff00,
+ 11: 0xffff00,
+ 12: 0x0000ff,
+ 13: 0xff00ff,
+ 14: 0x00ffff,
+ 15: 0xffffff,
+ 16: 0x000000,
+ 17: 0x00005f,
+ 18: 0x000087,
+ 19: 0x0000af,
+ 20: 0x0000d7,
+ 21: 0x0000ff,
+ 22: 0x005f00,
+ 23: 0x005f5f,
+ 24: 0x005f87,
+ 25: 0x005faf,
+ 26: 0x005fd7,
+ 27: 0x005fff,
+ 28: 0x008700,
+ 29: 0x00875f,
+ 30: 0x008787,
+ 31: 0x0087af,
+ 32: 0x0087d7,
+ 33: 0x0087ff,
+ 34: 0x00af00,
+ 35: 0x00af5f,
+ 36: 0x00af87,
+ 37: 0x00afaf,
+ 38: 0x00afd7,
+ 39: 0x00afff,
+ 40: 0x00d700,
+ 41: 0x00d75f,
+ 42: 0x00d787,
+ 43: 0x00d7af,
+ 44: 0x00d7d7,
+ 45: 0x00d7ff,
+ 46: 0x00ff00,
+ 47: 0x00ff5f,
+ 48: 0x00ff87,
+ 49: 0x00ffaf,
+ 50: 0x00ffd7,
+ 51: 0x00ffff,
+ 52: 0x5f0000,
+ 53: 0x5f005f,
+ 54: 0x5f0087,
+ 55: 0x5f00af,
+ 56: 0x5f00d7,
+ 57: 0x5f00ff,
+ 58: 0x5f5f00,
+ 59: 0x5f5f5f,
+ 60: 0x5f5f87,
+ 61: 0x5f5faf,
+ 62: 0x5f5fd7,
+ 63: 0x5f5fff,
+ 64: 0x5f8700,
+ 65: 0x5f875f,
+ 66: 0x5f8787,
+ 67: 0x5f87af,
+ 68: 0x5f87d7,
+ 69: 0x5f87ff,
+ 70: 0x5faf00,
+ 71: 0x5faf5f,
+ 72: 0x5faf87,
+ 73: 0x5fafaf,
+ 74: 0x5fafd7,
+ 75: 0x5fafff,
+ 76: 0x5fd700,
+ 77: 0x5fd75f,
+ 78: 0x5fd787,
+ 79: 0x5fd7af,
+ 80: 0x5fd7d7,
+ 81: 0x5fd7ff,
+ 82: 0x5fff00,
+ 83: 0x5fff5f,
+ 84: 0x5fff87,
+ 85: 0x5fffaf,
+ 86: 0x5fffd7,
+ 87: 0x5fffff,
+ 88: 0x870000,
+ 89: 0x87005f,
+ 90: 0x870087,
+ 91: 0x8700af,
+ 92: 0x8700d7,
+ 93: 0x8700ff,
+ 94: 0x875f00,
+ 95: 0x875f5f,
+ 96: 0x875f87,
+ 97: 0x875faf,
+ 98: 0x875fd7,
+ 99: 0x875fff,
+ 100: 0x878700,
+ 101: 0x87875f,
+ 102: 0x878787,
+ 103: 0x8787af,
+ 104: 0x8787d7,
+ 105: 0x8787ff,
+ 106: 0x87af00,
+ 107: 0x87af5f,
+ 108: 0x87af87,
+ 109: 0x87afaf,
+ 110: 0x87afd7,
+ 111: 0x87afff,
+ 112: 0x87d700,
+ 113: 0x87d75f,
+ 114: 0x87d787,
+ 115: 0x87d7af,
+ 116: 0x87d7d7,
+ 117: 0x87d7ff,
+ 118: 0x87ff00,
+ 119: 0x87ff5f,
+ 120: 0x87ff87,
+ 121: 0x87ffaf,
+ 122: 0x87ffd7,
+ 123: 0x87ffff,
+ 124: 0xaf0000,
+ 125: 0xaf005f,
+ 126: 0xaf0087,
+ 127: 0xaf00af,
+ 128: 0xaf00d7,
+ 129: 0xaf00ff,
+ 130: 0xaf5f00,
+ 131: 0xaf5f5f,
+ 132: 0xaf5f87,
+ 133: 0xaf5faf,
+ 134: 0xaf5fd7,
+ 135: 0xaf5fff,
+ 136: 0xaf8700,
+ 137: 0xaf875f,
+ 138: 0xaf8787,
+ 139: 0xaf87af,
+ 140: 0xaf87d7,
+ 141: 0xaf87ff,
+ 142: 0xafaf00,
+ 143: 0xafaf5f,
+ 144: 0xafaf87,
+ 145: 0xafafaf,
+ 146: 0xafafd7,
+ 147: 0xafafff,
+ 148: 0xafd700,
+ 149: 0xafd75f,
+ 150: 0xafd787,
+ 151: 0xafd7af,
+ 152: 0xafd7d7,
+ 153: 0xafd7ff,
+ 154: 0xafff00,
+ 155: 0xafff5f,
+ 156: 0xafff87,
+ 157: 0xafffaf,
+ 158: 0xafffd7,
+ 159: 0xafffff,
+ 160: 0xd70000,
+ 161: 0xd7005f,
+ 162: 0xd70087,
+ 163: 0xd700af,
+ 164: 0xd700d7,
+ 165: 0xd700ff,
+ 166: 0xd75f00,
+ 167: 0xd75f5f,
+ 168: 0xd75f87,
+ 169: 0xd75faf,
+ 170: 0xd75fd7,
+ 171: 0xd75fff,
+ 172: 0xd78700,
+ 173: 0xd7875f,
+ 174: 0xd78787,
+ 175: 0xd787af,
+ 176: 0xd787d7,
+ 177: 0xd787ff,
+ 178: 0xd7af00,
+ 179: 0xd7af5f,
+ 180: 0xd7af87,
+ 181: 0xd7afaf,
+ 182: 0xd7afd7,
+ 183: 0xd7afff,
+ 184: 0xd7d700,
+ 185: 0xd7d75f,
+ 186: 0xd7d787,
+ 187: 0xd7d7af,
+ 188: 0xd7d7d7,
+ 189: 0xd7d7ff,
+ 190: 0xd7ff00,
+ 191: 0xd7ff5f,
+ 192: 0xd7ff87,
+ 193: 0xd7ffaf,
+ 194: 0xd7ffd7,
+ 195: 0xd7ffff,
+ 196: 0xff0000,
+ 197: 0xff005f,
+ 198: 0xff0087,
+ 199: 0xff00af,
+ 200: 0xff00d7,
+ 201: 0xff00ff,
+ 202: 0xff5f00,
+ 203: 0xff5f5f,
+ 204: 0xff5f87,
+ 205: 0xff5faf,
+ 206: 0xff5fd7,
+ 207: 0xff5fff,
+ 208: 0xff8700,
+ 209: 0xff875f,
+ 210: 0xff8787,
+ 211: 0xff87af,
+ 212: 0xff87d7,
+ 213: 0xff87ff,
+ 214: 0xffaf00,
+ 215: 0xffaf5f,
+ 216: 0xffaf87,
+ 217: 0xffafaf,
+ 218: 0xffafd7,
+ 219: 0xffafff,
+ 220: 0xffd700,
+ 221: 0xffd75f,
+ 222: 0xffd787,
+ 223: 0xffd7af,
+ 224: 0xffd7d7,
+ 225: 0xffd7ff,
+ 226: 0xffff00,
+ 227: 0xffff5f,
+ 228: 0xffff87,
+ 229: 0xffffaf,
+ 230: 0xffffd7,
+ 231: 0xffffff,
+ 232: 0x080808,
+ 233: 0x121212,
+ 234: 0x1c1c1c,
+ 235: 0x262626,
+ 236: 0x303030,
+ 237: 0x3a3a3a,
+ 238: 0x444444,
+ 239: 0x4e4e4e,
+ 240: 0x585858,
+ 241: 0x626262,
+ 242: 0x6c6c6c,
+ 243: 0x767676,
+ 244: 0x808080,
+ 245: 0x8a8a8a,
+ 246: 0x949494,
+ 247: 0x9e9e9e,
+ 248: 0xa8a8a8,
+ 249: 0xb2b2b2,
+ 250: 0xbcbcbc,
+ 251: 0xc6c6c6,
+ 252: 0xd0d0d0,
+ 253: 0xdadada,
+ 254: 0xe4e4e4,
+ 255: 0xeeeeee,
+}
+
+// `\033]0;TITLESTR\007`
+func doTitleSequence(er *bytes.Reader) error {
+ var c byte
+ var err error
+
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != '0' && c != '2' {
+ return nil
+ }
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c != ';' {
+ return nil
+ }
+ title := make([]byte, 0, 80)
+ for {
+ c, err = er.ReadByte()
+ if err != nil {
+ return err
+ }
+ if c == 0x07 || c == '\n' {
+ break
+ }
+ title = append(title, c)
+ }
+ if len(title) > 0 {
+ title8, err := syscall.UTF16PtrFromString(string(title))
+ if err == nil {
+ procSetConsoleTitle.Call(uintptr(unsafe.Pointer(title8)))
+ }
+ }
+ return nil
+}
+
+// returns Atoi(s) unless s == "" in which case it returns def
+func atoiWithDefault(s string, def int) (int, error) {
+ if s == "" {
+ return def, nil
+ }
+ return strconv.Atoi(s)
+}
+
+// Write writes data on console
+func (w *Writer) Write(data []byte) (n int, err error) {
+ var csbi consoleScreenBufferInfo
+ procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi)))
+
+ handle := w.handle
+
+ var er *bytes.Reader
+ if w.rest.Len() > 0 {
+ var rest bytes.Buffer
+ w.rest.WriteTo(&rest)
+ w.rest.Reset()
+ rest.Write(data)
+ er = bytes.NewReader(rest.Bytes())
+ } else {
+ er = bytes.NewReader(data)
+ }
+ var bw [1]byte
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c1 != 0x1b {
+ bw[0] = c1
+ w.out.Write(bw[:])
+ continue
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+
+ switch c2 {
+ case '>':
+ continue
+ case ']':
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+ if bytes.IndexByte(w.rest.Bytes(), 0x07) == -1 {
+ break loop
+ }
+ er = bytes.NewReader(w.rest.Bytes()[2:])
+ err := doTitleSequence(er)
+ if err != nil {
+ break loop
+ }
+ w.rest.Reset()
+ continue
+ // https://github.com/mattn/go-colorable/issues/27
+ case '7':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ continue
+ case '8':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ continue
+ case 0x5b:
+ // execute part after switch
+ default:
+ continue
+ }
+
+ w.rest.WriteByte(c1)
+ w.rest.WriteByte(c2)
+ er.WriteTo(&w.rest)
+
+ var buf bytes.Buffer
+ var m byte
+ for i, c := range w.rest.Bytes()[2:] {
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ m = c
+ er = bytes.NewReader(w.rest.Bytes()[2+i+1:])
+ w.rest.Reset()
+ break
+ }
+ buf.Write([]byte(string(c)))
+ }
+ if m == 0 {
+ break loop
+ }
+
+ switch m {
+ case 'A':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'B':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'C':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'D':
+ n, err = atoiWithDefault(buf.String(), 1)
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x -= short(n)
+ if csbi.cursorPosition.x < 0 {
+ csbi.cursorPosition.x = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'E':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y += short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'F':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = 0
+ csbi.cursorPosition.y -= short(n)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'G':
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ if n < 1 {
+ n = 1
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ csbi.cursorPosition.x = short(n - 1)
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'H', 'f':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ if buf.Len() > 0 {
+ token := strings.Split(buf.String(), ";")
+ switch len(token) {
+ case 1:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.y = short(n1 - 1)
+ case 2:
+ n1, err := strconv.Atoi(token[0])
+ if err != nil {
+ continue
+ }
+ n2, err := strconv.Atoi(token[1])
+ if err != nil {
+ continue
+ }
+ csbi.cursorPosition.x = short(n2 - 1)
+ csbi.cursorPosition.y = short(n1 - 1)
+ }
+ } else {
+ csbi.cursorPosition.y = 0
+ }
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&csbi.cursorPosition)))
+ case 'J':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ var count, written dword
+ var cursor coord
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.window.top-csbi.cursorPosition.y)*dword(csbi.size.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.window.top}
+ count = dword(csbi.size.x) - dword(csbi.cursorPosition.x) + dword(csbi.size.y-csbi.cursorPosition.y)*dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'K':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ var cursor coord
+ var count, written dword
+ switch n {
+ case 0:
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 1:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x - csbi.cursorPosition.x)
+ case 2:
+ cursor = coord{x: csbi.window.left, y: csbi.cursorPosition.y}
+ count = dword(csbi.size.x)
+ }
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(count), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'X':
+ n := 0
+ if buf.Len() > 0 {
+ n, err = strconv.Atoi(buf.String())
+ if err != nil {
+ continue
+ }
+ }
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ var cursor coord
+ var written dword
+ cursor = coord{x: csbi.cursorPosition.x, y: csbi.cursorPosition.y}
+ procFillConsoleOutputCharacter.Call(uintptr(handle), uintptr(' '), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ procFillConsoleOutputAttribute.Call(uintptr(handle), uintptr(csbi.attributes), uintptr(n), *(*uintptr)(unsafe.Pointer(&cursor)), uintptr(unsafe.Pointer(&written)))
+ case 'm':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ attr := csbi.attributes
+ cs := buf.String()
+ if cs == "" {
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(w.oldattr))
+ continue
+ }
+ token := strings.Split(cs, ";")
+ for i := 0; i < len(token); i++ {
+ ns := token[i]
+ if n, err = strconv.Atoi(ns); err == nil {
+ switch {
+ case n == 0 || n == 100:
+ attr = w.oldattr
+ case 1 <= n && n <= 5:
+ attr |= foregroundIntensity
+ case n == 7:
+ attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
+ case n == 22 || n == 25:
+ attr |= foregroundIntensity
+ case n == 27:
+ attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4)
+ case 30 <= n && n <= 37:
+ attr &= backgroundMask
+ if (n-30)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-30)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-30)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case n == 38: // set foreground color.
+ if i < len(token)-2 && (token[i+1] == "5" || token[i+1] == "05") {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256foreAttr == nil {
+ n256setup()
+ }
+ attr &= backgroundMask
+ attr |= n256foreAttr[n256]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= foregroundRed
+ }
+ if g > 127 {
+ attr |= foregroundGreen
+ }
+ if b > 127 {
+ attr |= foregroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & backgroundMask)
+ }
+ case n == 39: // reset foreground color.
+ attr &= backgroundMask
+ attr |= w.oldattr & foregroundMask
+ case 40 <= n && n <= 47:
+ attr &= foregroundMask
+ if (n-40)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-40)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-40)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ case n == 48: // set background color.
+ if i < len(token)-2 && token[i+1] == "5" {
+ if n256, err := strconv.Atoi(token[i+2]); err == nil {
+ if n256backAttr == nil {
+ n256setup()
+ }
+ attr &= foregroundMask
+ attr |= n256backAttr[n256]
+ i += 2
+ }
+ } else if len(token) == 5 && token[i+1] == "2" {
+ var r, g, b int
+ r, _ = strconv.Atoi(token[i+2])
+ g, _ = strconv.Atoi(token[i+3])
+ b, _ = strconv.Atoi(token[i+4])
+ i += 4
+ if r > 127 {
+ attr |= backgroundRed
+ }
+ if g > 127 {
+ attr |= backgroundGreen
+ }
+ if b > 127 {
+ attr |= backgroundBlue
+ }
+ } else {
+ attr = attr & (w.oldattr & foregroundMask)
+ }
+ case n == 49: // reset background color.
+ attr &= foregroundMask
+ attr |= w.oldattr & backgroundMask
+ case 90 <= n && n <= 97:
+ attr = (attr & backgroundMask)
+ attr |= foregroundIntensity
+ if (n-90)&1 != 0 {
+ attr |= foregroundRed
+ }
+ if (n-90)&2 != 0 {
+ attr |= foregroundGreen
+ }
+ if (n-90)&4 != 0 {
+ attr |= foregroundBlue
+ }
+ case 100 <= n && n <= 107:
+ attr = (attr & foregroundMask)
+ attr |= backgroundIntensity
+ if (n-100)&1 != 0 {
+ attr |= backgroundRed
+ }
+ if (n-100)&2 != 0 {
+ attr |= backgroundGreen
+ }
+ if (n-100)&4 != 0 {
+ attr |= backgroundBlue
+ }
+ }
+ procSetConsoleTextAttribute.Call(uintptr(handle), uintptr(attr))
+ }
+ }
+ case 'h':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle == 0 {
+ h, _, _ := procCreateConsoleScreenBuffer.Call(uintptr(genericRead|genericWrite), 0, 0, uintptr(consoleTextmodeBuffer), 0, 0)
+ w.althandle = syscall.Handle(h)
+ if w.althandle != 0 {
+ handle = w.althandle
+ }
+ }
+ }
+ case 'l':
+ var ci consoleCursorInfo
+ cs := buf.String()
+ if cs == "5>" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 1
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?25" {
+ procGetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ ci.visible = 0
+ procSetConsoleCursorInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&ci)))
+ } else if cs == "?1049" {
+ if w.althandle != 0 {
+ syscall.CloseHandle(w.althandle)
+ w.althandle = 0
+ handle = w.handle
+ }
+ }
+ case 's':
+ procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi)))
+ w.oldpos = csbi.cursorPosition
+ case 'u':
+ procSetConsoleCursorPosition.Call(uintptr(handle), *(*uintptr)(unsafe.Pointer(&w.oldpos)))
+ }
+ }
+
+ return len(data), nil
+}
+
+type consoleColor struct {
+ rgb int
+ red bool
+ green bool
+ blue bool
+ intensity bool
+}
+
+func (c consoleColor) foregroundAttr() (attr word) {
+ if c.red {
+ attr |= foregroundRed
+ }
+ if c.green {
+ attr |= foregroundGreen
+ }
+ if c.blue {
+ attr |= foregroundBlue
+ }
+ if c.intensity {
+ attr |= foregroundIntensity
+ }
+ return
+}
+
+func (c consoleColor) backgroundAttr() (attr word) {
+ if c.red {
+ attr |= backgroundRed
+ }
+ if c.green {
+ attr |= backgroundGreen
+ }
+ if c.blue {
+ attr |= backgroundBlue
+ }
+ if c.intensity {
+ attr |= backgroundIntensity
+ }
+ return
+}
+
+var color16 = []consoleColor{
+ {0x000000, false, false, false, false},
+ {0x000080, false, false, true, false},
+ {0x008000, false, true, false, false},
+ {0x008080, false, true, true, false},
+ {0x800000, true, false, false, false},
+ {0x800080, true, false, true, false},
+ {0x808000, true, true, false, false},
+ {0xc0c0c0, true, true, true, false},
+ {0x808080, false, false, false, true},
+ {0x0000ff, false, false, true, true},
+ {0x00ff00, false, true, false, true},
+ {0x00ffff, false, true, true, true},
+ {0xff0000, true, false, false, true},
+ {0xff00ff, true, false, true, true},
+ {0xffff00, true, true, false, true},
+ {0xffffff, true, true, true, true},
+}
+
+type hsv struct {
+ h, s, v float32
+}
+
+func (a hsv) dist(b hsv) float32 {
+ dh := a.h - b.h
+ switch {
+ case dh > 0.5:
+ dh = 1 - dh
+ case dh < -0.5:
+ dh = -1 - dh
+ }
+ ds := a.s - b.s
+ dv := a.v - b.v
+ return float32(math.Sqrt(float64(dh*dh + ds*ds + dv*dv)))
+}
+
+func toHSV(rgb int) hsv {
+ r, g, b := float32((rgb&0xFF0000)>>16)/256.0,
+ float32((rgb&0x00FF00)>>8)/256.0,
+ float32(rgb&0x0000FF)/256.0
+ min, max := minmax3f(r, g, b)
+ h := max - min
+ if h > 0 {
+ if max == r {
+ h = (g - b) / h
+ if h < 0 {
+ h += 6
+ }
+ } else if max == g {
+ h = 2 + (b-r)/h
+ } else {
+ h = 4 + (r-g)/h
+ }
+ }
+ h /= 6.0
+ s := max - min
+ if max != 0 {
+ s /= max
+ }
+ v := max
+ return hsv{h: h, s: s, v: v}
+}
+
+type hsvTable []hsv
+
+func toHSVTable(rgbTable []consoleColor) hsvTable {
+ t := make(hsvTable, len(rgbTable))
+ for i, c := range rgbTable {
+ t[i] = toHSV(c.rgb)
+ }
+ return t
+}
+
+func (t hsvTable) find(rgb int) consoleColor {
+ hsv := toHSV(rgb)
+ n := 7
+ l := float32(5.0)
+ for i, p := range t {
+ d := hsv.dist(p)
+ if d < l {
+ l, n = d, i
+ }
+ }
+ return color16[n]
+}
+
+func minmax3f(a, b, c float32) (min, max float32) {
+ if a < b {
+ if b < c {
+ return a, c
+ } else if a < c {
+ return a, b
+ } else {
+ return c, b
+ }
+ } else {
+ if a < c {
+ return b, c
+ } else if b < c {
+ return b, a
+ } else {
+ return c, a
+ }
+ }
+}
+
+var n256foreAttr []word
+var n256backAttr []word
+
+func n256setup() {
+ n256foreAttr = make([]word, 256)
+ n256backAttr = make([]word, 256)
+ t := toHSVTable(color16)
+ for i, rgb := range color256 {
+ c := t.find(rgb)
+ n256foreAttr[i] = c.foregroundAttr()
+ n256backAttr[i] = c.backgroundAttr()
+ }
+}
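A short illustrative sketch of how the colorable Writer above is typically used; it is not part of the patch, and the escape sequences and messages are arbitrary examples:

```go
package main

import (
	"fmt"

	"github.com/mattn/go-colorable"
)

func main() {
	// On Windows the returned Writer translates ANSI escape sequences into
	// console API calls; elsewhere it is simply os.Stdout.
	out := colorable.NewColorableStdout()

	fmt.Fprintf(out, "\x1b[32mok\x1b[0m %s\n", "database migration")
	fmt.Fprintf(out, "\x1b[1;31merror:\x1b[0m %s\n", "connection refused")
}
```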
diff --git a/vendor/github.com/mattn/go-colorable/go.mod b/vendor/github.com/mattn/go-colorable/go.mod
new file mode 100644
index 0000000000..ef3ca9d4c3
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/go.mod
@@ -0,0 +1,3 @@
+module github.com/mattn/go-colorable
+
+require github.com/mattn/go-isatty v0.0.8
diff --git a/vendor/github.com/mattn/go-colorable/go.sum b/vendor/github.com/mattn/go-colorable/go.sum
new file mode 100644
index 0000000000..2c12960ec7
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/go.sum
@@ -0,0 +1,4 @@
+github.com/mattn/go-isatty v0.0.5 h1:tHXDdz1cpzGaovsTB+TVB8q90WEokoVmfMqoVcrLUgw=
+github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
+golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
diff --git a/vendor/github.com/mattn/go-colorable/noncolorable.go b/vendor/github.com/mattn/go-colorable/noncolorable.go
new file mode 100644
index 0000000000..95f2c6be25
--- /dev/null
+++ b/vendor/github.com/mattn/go-colorable/noncolorable.go
@@ -0,0 +1,55 @@
+package colorable
+
+import (
+ "bytes"
+ "io"
+)
+
+// NonColorable holds a writer but removes escape sequences.
+type NonColorable struct {
+ out io.Writer
+}
+
+// NewNonColorable returns a new instance of Writer which removes escape sequences from the given Writer.
+func NewNonColorable(w io.Writer) io.Writer {
+ return &NonColorable{out: w}
+}
+
+// Write writes data on console
+func (w *NonColorable) Write(data []byte) (n int, err error) {
+ er := bytes.NewReader(data)
+ var bw [1]byte
+loop:
+ for {
+ c1, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c1 != 0x1b {
+ bw[0] = c1
+ w.out.Write(bw[:])
+ continue
+ }
+ c2, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if c2 != 0x5b {
+ continue
+ }
+
+ var buf bytes.Buffer
+ for {
+ c, err := er.ReadByte()
+ if err != nil {
+ break loop
+ }
+ if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' {
+ break
+ }
+ buf.Write([]byte(string(c)))
+ }
+ }
+
+ return len(data), nil
+}
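For contrast with the colorable writers, a small sketch (not part of the patch) showing NewNonColorable stripping escape sequences when output is captured in a buffer; the message is a made-up example:

```go
package main

import (
	"bytes"
	"fmt"

	"github.com/mattn/go-colorable"
)

func main() {
	var buf bytes.Buffer

	// NewNonColorable strips ANSI escape sequences, which is handy when the
	// same colored output is also written to a file or a pipe.
	plain := colorable.NewNonColorable(&buf)
	fmt.Fprintf(plain, "\x1b[33mwarning:\x1b[0m disk almost full\n")

	fmt.Print(buf.String()) // prints "warning: disk almost full"
}
```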
diff --git a/vendor/github.com/mattn/go-isatty/go.mod b/vendor/github.com/mattn/go-isatty/go.mod
index f310320c33..53d84a672c 100644
--- a/vendor/github.com/mattn/go-isatty/go.mod
+++ b/vendor/github.com/mattn/go-isatty/go.mod
@@ -1,3 +1,5 @@
module github.com/mattn/go-isatty
-require golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223
+go 1.12
+
+require golang.org/x/sys v0.0.0-20191026070338-33540a1f6037
diff --git a/vendor/github.com/mattn/go-isatty/go.sum b/vendor/github.com/mattn/go-isatty/go.sum
index 426c8973c0..5e0752bdf7 100644
--- a/vendor/github.com/mattn/go-isatty/go.sum
+++ b/vendor/github.com/mattn/go-isatty/go.sum
@@ -1,2 +1,2 @@
-golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
-golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
+golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
diff --git a/vendor/github.com/mattn/go-isatty/isatty_others.go b/vendor/github.com/mattn/go-isatty/isatty_others.go
index f02849c56f..ff714a3761 100644
--- a/vendor/github.com/mattn/go-isatty/isatty_others.go
+++ b/vendor/github.com/mattn/go-isatty/isatty_others.go
@@ -1,4 +1,4 @@
-// +build appengine js
+// +build appengine js nacl
package isatty
diff --git a/vendor/github.com/mattn/go-isatty/isatty_plan9.go b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
new file mode 100644
index 0000000000..c5b6e0c084
--- /dev/null
+++ b/vendor/github.com/mattn/go-isatty/isatty_plan9.go
@@ -0,0 +1,22 @@
+// +build plan9
+
+package isatty
+
+import (
+ "syscall"
+)
+
+// IsTerminal returns true if the given file descriptor is a terminal.
+func IsTerminal(fd uintptr) bool {
+ path, err := syscall.Fd2path(int(fd))
+ if err != nil {
+ return false
+ }
+ return path == "/dev/cons" || path == "/mnt/term/dev/cons"
+}
+
+// IsCygwinTerminal returns true if the file descriptor is a cygwin or msys2
+// terminal. This is always false on this environment.
+func IsCygwinTerminal(fd uintptr) bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-isatty/isatty_linux.go b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
index 4f8af46520..453b025d0d 100644
--- a/vendor/github.com/mattn/go-isatty/isatty_linux.go
+++ b/vendor/github.com/mattn/go-isatty/isatty_tcgets.go
@@ -1,4 +1,4 @@
-// +build linux
+// +build linux aix
// +build !appengine
// +build !android
diff --git a/vendor/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go
index af51cbcaa4..1fa8691540 100644
--- a/vendor/github.com/mattn/go-isatty/isatty_windows.go
+++ b/vendor/github.com/mattn/go-isatty/isatty_windows.go
@@ -4,6 +4,7 @@
package isatty
import (
+ "errors"
"strings"
"syscall"
"unicode/utf16"
@@ -11,15 +12,18 @@ import (
)
const (
- fileNameInfo uintptr = 2
- fileTypePipe = 3
+ objectNameInfo uintptr = 1
+ fileNameInfo = 2
+ fileTypePipe = 3
)
var (
kernel32 = syscall.NewLazyDLL("kernel32.dll")
+ ntdll = syscall.NewLazyDLL("ntdll.dll")
procGetConsoleMode = kernel32.NewProc("GetConsoleMode")
procGetFileInformationByHandleEx = kernel32.NewProc("GetFileInformationByHandleEx")
procGetFileType = kernel32.NewProc("GetFileType")
+ procNtQueryObject = ntdll.NewProc("NtQueryObject")
)
func init() {
@@ -45,7 +49,10 @@ func isCygwinPipeName(name string) bool {
return false
}
- if token[0] != `\msys` && token[0] != `\cygwin` {
+ if token[0] != `\msys` &&
+ token[0] != `\cygwin` &&
+ token[0] != `\Device\NamedPipe\msys` &&
+ token[0] != `\Device\NamedPipe\cygwin` {
return false
}
@@ -68,11 +75,35 @@ func isCygwinPipeName(name string) bool {
return true
}
+// getFileNameByHandle uses the undocumented ntdll NtQueryObject call to get the full file name from a file handle.
+// GetFileInformationByHandleEx is not available before Windows Vista, so this is a workaround for older systems
+// such as Windows XP; it also works on Windows Vista through 10.
+// See https://stackoverflow.com/a/18792477 for details.
+func getFileNameByHandle(fd uintptr) (string, error) {
+ if procNtQueryObject == nil {
+ return "", errors.New("ntdll.dll: NtQueryObject not supported")
+ }
+
+ var buf [4 + syscall.MAX_PATH]uint16
+ var result int
+ r, _, e := syscall.Syscall6(procNtQueryObject.Addr(), 5,
+ fd, objectNameInfo, uintptr(unsafe.Pointer(&buf)), uintptr(2*len(buf)), uintptr(unsafe.Pointer(&result)), 0)
+ if r != 0 {
+ return "", e
+ }
+ return string(utf16.Decode(buf[4 : 4+buf[0]/2])), nil
+}
+
// IsCygwinTerminal() return true if the file descriptor is a cygwin or msys2
// terminal.
func IsCygwinTerminal(fd uintptr) bool {
if procGetFileInformationByHandleEx == nil {
- return false
+ name, err := getFileNameByHandle(fd)
+ if err != nil {
+ return false
+ }
+ return isCygwinPipeName(name)
}
// Cygwin/msys's pty is a pipe.
diff --git a/vendor/github.com/mattn/go-runewidth/.travis.yml b/vendor/github.com/mattn/go-runewidth/.travis.yml
new file mode 100644
index 0000000000..5c9c2a30f0
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/.travis.yml
@@ -0,0 +1,8 @@
+language: go
+go:
+ - tip
+before_install:
+ - go get github.com/mattn/goveralls
+ - go get golang.org/x/tools/cmd/cover
+script:
+ - $HOME/gopath/bin/goveralls -repotoken lAKAWPzcGsD3A8yBX3BGGtRUdJ6CaGERL
diff --git a/vendor/github.com/mattn/go-runewidth/LICENSE b/vendor/github.com/mattn/go-runewidth/LICENSE
new file mode 100644
index 0000000000..91b5cef30e
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2016 Yasuhiro Matsumoto
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mattn/go-runewidth/README.mkd b/vendor/github.com/mattn/go-runewidth/README.mkd
new file mode 100644
index 0000000000..66663a94b0
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/README.mkd
@@ -0,0 +1,27 @@
+go-runewidth
+============
+
+[![Build Status](https://travis-ci.org/mattn/go-runewidth.png?branch=master)](https://travis-ci.org/mattn/go-runewidth)
+[![Coverage Status](https://coveralls.io/repos/mattn/go-runewidth/badge.png?branch=HEAD)](https://coveralls.io/r/mattn/go-runewidth?branch=HEAD)
+[![GoDoc](https://godoc.org/github.com/mattn/go-runewidth?status.svg)](http://godoc.org/github.com/mattn/go-runewidth)
+[![Go Report Card](https://goreportcard.com/badge/github.com/mattn/go-runewidth)](https://goreportcard.com/report/github.com/mattn/go-runewidth)
+
+Provides functions to get the fixed display width of a character or string.
+
+Usage
+-----
+
+```go
+runewidth.StringWidth("つのだ☆HIRO") == 12
+```
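+
+A slightly fuller sketch of the same API (these helpers are the ones exported by this vendored copy; the expected widths assume a non-CJK locale, e.g. `RUNEWIDTH_EASTASIAN=0`):
+
+```go
+package main
+
+import (
+	"fmt"
+
+	"github.com/mattn/go-runewidth"
+)
+
+func main() {
+	s := "つのだ☆HIRO"
+	fmt.Println(runewidth.StringWidth(s))         // 12
+	fmt.Println(runewidth.RuneWidth('あ'))         // 2 (double-width rune)
+	fmt.Println(runewidth.Truncate(s, 10, "...")) // truncated so the result fits in 10 cells
+	fmt.Println(runewidth.FillRight("go", 5))     // "go" padded with spaces to 5 cells
+}
+```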
+
+
+Author
+------
+
+Yasuhiro Matsumoto
+
+License
+-------
+
+under the MIT License: http://mattn.mit-license.org/2013
diff --git a/vendor/github.com/mattn/go-runewidth/go.mod b/vendor/github.com/mattn/go-runewidth/go.mod
new file mode 100644
index 0000000000..fa7f4d864e
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/go.mod
@@ -0,0 +1,3 @@
+module github.com/mattn/go-runewidth
+
+go 1.9
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth.go b/vendor/github.com/mattn/go-runewidth/runewidth.go
new file mode 100644
index 0000000000..8d64da0778
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth.go
@@ -0,0 +1,258 @@
+package runewidth
+
+import (
+ "os"
+)
+
+//go:generate go run script/generate.go
+
+var (
+ // EastAsianWidth is set to true if the current locale is CJK
+ EastAsianWidth bool
+
+ // ZeroWidthJoiner is a flag that enables UTR#51 zero-width-joiner handling
+ ZeroWidthJoiner bool
+
+ // DefaultCondition is the Condition for the current locale
+ DefaultCondition = &Condition{}
+)
+
+func init() {
+ handleEnv()
+}
+
+func handleEnv() {
+ env := os.Getenv("RUNEWIDTH_EASTASIAN")
+ if env == "" {
+ EastAsianWidth = IsEastAsian()
+ } else {
+ EastAsianWidth = env == "1"
+ }
+ // update DefaultCondition
+ DefaultCondition.EastAsianWidth = EastAsianWidth
+ DefaultCondition.ZeroWidthJoiner = ZeroWidthJoiner
+}
+
+type interval struct {
+ first rune
+ last rune
+}
+
+type table []interval
+
+func inTables(r rune, ts ...table) bool {
+ for _, t := range ts {
+ if inTable(r, t) {
+ return true
+ }
+ }
+ return false
+}
+
+func inTable(r rune, t table) bool {
+ // func (t table) IncludesRune(r rune) bool {
+ if r < t[0].first {
+ return false
+ }
+
+ bot := 0
+ top := len(t) - 1
+ for top >= bot {
+ mid := (bot + top) >> 1
+
+ switch {
+ case t[mid].last < r:
+ bot = mid + 1
+ case t[mid].first > r:
+ top = mid - 1
+ default:
+ return true
+ }
+ }
+
+ return false
+}
+
+var private = table{
+ {0x00E000, 0x00F8FF}, {0x0F0000, 0x0FFFFD}, {0x100000, 0x10FFFD},
+}
+
+var nonprint = table{
+ {0x0000, 0x001F}, {0x007F, 0x009F}, {0x00AD, 0x00AD},
+ {0x070F, 0x070F}, {0x180B, 0x180E}, {0x200B, 0x200F},
+ {0x2028, 0x202E}, {0x206A, 0x206F}, {0xD800, 0xDFFF},
+ {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFB}, {0xFFFE, 0xFFFF},
+}
+
+// Condition holds the EastAsianWidth flag indicating whether the current locale is CJK.
+type Condition struct {
+ EastAsianWidth bool
+ ZeroWidthJoiner bool
+}
+
+// NewCondition returns a new Condition initialized from the current locale.
+func NewCondition() *Condition {
+ return &Condition{
+ EastAsianWidth: EastAsianWidth,
+ ZeroWidthJoiner: ZeroWidthJoiner,
+ }
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func (c *Condition) RuneWidth(r rune) int {
+ switch {
+ case r < 0 || r > 0x10FFFF || inTables(r, nonprint, combining, notassigned):
+ return 0
+ case (c.EastAsianWidth && IsAmbiguousWidth(r)) || inTables(r, doublewidth):
+ return 2
+ default:
+ return 1
+ }
+}
+
+func (c *Condition) stringWidth(s string) (width int) {
+ for _, r := range []rune(s) {
+ width += c.RuneWidth(r)
+ }
+ return width
+}
+
+func (c *Condition) stringWidthZeroJoiner(s string) (width int) {
+ r1, r2 := rune(0), rune(0)
+ for _, r := range []rune(s) {
+ if r == 0xFE0E || r == 0xFE0F {
+ continue
+ }
+ w := c.RuneWidth(r)
+ if r2 == 0x200D && inTables(r, emoji) && inTables(r1, emoji) {
+ if width < w {
+ width = w
+ }
+ } else {
+ width += w
+ }
+ r1, r2 = r2, r
+ }
+ return width
+}
+
+// StringWidth returns the display width of the string in cells.
+func (c *Condition) StringWidth(s string) (width int) {
+ if c.ZeroWidthJoiner {
+ return c.stringWidthZeroJoiner(s)
+ }
+ return c.stringWidth(s)
+}
+
+// Truncate returns the string truncated to fit within w cells, with tail appended.
+func (c *Condition) Truncate(s string, w int, tail string) string {
+ if c.StringWidth(s) <= w {
+ return s
+ }
+ r := []rune(s)
+ tw := c.StringWidth(tail)
+ w -= tw
+ width := 0
+ i := 0
+ for ; i < len(r); i++ {
+ cw := c.RuneWidth(r[i])
+ if width+cw > w {
+ break
+ }
+ width += cw
+ }
+ return string(r[0:i]) + tail
+}
+
+// Wrap returns the string wrapped so that no line exceeds w cells.
+func (c *Condition) Wrap(s string, w int) string {
+ width := 0
+ out := ""
+ for _, r := range []rune(s) {
+ cw := RuneWidth(r)
+ if r == '\n' {
+ out += string(r)
+ width = 0
+ continue
+ } else if width+cw > w {
+ out += "\n"
+ width = 0
+ out += string(r)
+ width += cw
+ continue
+ }
+ out += string(r)
+ width += cw
+ }
+ return out
+}
+
+// FillLeft pads the string on the left with spaces until it occupies w cells.
+func (c *Condition) FillLeft(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return string(b) + s
+ }
+ return s
+}
+
+// FillRight pads the string on the right with spaces until it occupies w cells.
+func (c *Condition) FillRight(s string, w int) string {
+ width := c.StringWidth(s)
+ count := w - width
+ if count > 0 {
+ b := make([]byte, count)
+ for i := range b {
+ b[i] = ' '
+ }
+ return s + string(b)
+ }
+ return s
+}
+
+// RuneWidth returns the number of cells in r.
+// See http://www.unicode.org/reports/tr11/
+func RuneWidth(r rune) int {
+ return DefaultCondition.RuneWidth(r)
+}
+
+// IsAmbiguousWidth reports whether r has ambiguous width.
+func IsAmbiguousWidth(r rune) bool {
+ return inTables(r, private, ambiguous)
+}
+
+// IsNeutralWidth reports whether r has neutral width.
+func IsNeutralWidth(r rune) bool {
+ return inTable(r, neutral)
+}
+
+// StringWidth returns the display width of the string in cells.
+func StringWidth(s string) (width int) {
+ return DefaultCondition.StringWidth(s)
+}
+
+// Truncate returns the string truncated to fit within w cells, with tail appended.
+func Truncate(s string, w int, tail string) string {
+ return DefaultCondition.Truncate(s, w, tail)
+}
+
+// Wrap returns the string wrapped so that no line exceeds w cells.
+func Wrap(s string, w int) string {
+ return DefaultCondition.Wrap(s, w)
+}
+
+// FillLeft pads the string on the left with spaces until it occupies w cells.
+func FillLeft(s string, w int) string {
+ return DefaultCondition.FillLeft(s, w)
+}
+
+// FillRight pads the string on the right with spaces until it occupies w cells.
+func FillRight(s string, w int) string {
+ return DefaultCondition.FillRight(s, w)
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
new file mode 100644
index 0000000000..7d99f6e521
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_appengine.go
@@ -0,0 +1,8 @@
+// +build appengine
+
+package runewidth
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ return false
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_js.go b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
new file mode 100644
index 0000000000..c5fdf40baa
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_js.go
@@ -0,0 +1,9 @@
+// +build js
+// +build !appengine
+
+package runewidth
+
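+// IsEastAsian returns true if the current locale is CJK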
+func IsEastAsian() bool {
+ // TODO: Implement this for the web. Detect east asian in a compatible way, and return true.
+ return false
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_posix.go b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
new file mode 100644
index 0000000000..66a58b5d87
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_posix.go
@@ -0,0 +1,79 @@
+// +build !windows
+// +build !js
+// +build !appengine
+
+package runewidth
+
+import (
+ "os"
+ "regexp"
+ "strings"
+)
+
+var reLoc = regexp.MustCompile(`^[a-z][a-z][a-z]?(?:_[A-Z][A-Z])?\.(.+)`)
+
+var mblenTable = map[string]int{
+ "utf-8": 6,
+ "utf8": 6,
+ "jis": 8,
+ "eucjp": 3,
+ "euckr": 2,
+ "euccn": 2,
+ "sjis": 2,
+ "cp932": 2,
+ "cp51932": 2,
+ "cp936": 2,
+ "cp949": 2,
+ "cp950": 2,
+ "big5": 2,
+ "gbk": 2,
+ "gb2312": 2,
+}
+
+func isEastAsian(locale string) bool {
+ charset := strings.ToLower(locale)
+ r := reLoc.FindStringSubmatch(locale)
+ if len(r) == 2 {
+ charset = strings.ToLower(r[1])
+ }
+
+ if strings.HasSuffix(charset, "@cjk_narrow") {
+ return false
+ }
+
+ for pos, b := range []byte(charset) {
+ if b == '@' {
+ charset = charset[:pos]
+ break
+ }
+ }
+ max := 1
+ if m, ok := mblenTable[charset]; ok {
+ max = m
+ }
+ if max > 1 && (charset[0] != 'u' ||
+ strings.HasPrefix(locale, "ja") ||
+ strings.HasPrefix(locale, "ko") ||
+ strings.HasPrefix(locale, "zh")) {
+ return true
+ }
+ return false
+}
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ locale := os.Getenv("LC_CTYPE")
+ if locale == "" {
+ locale = os.Getenv("LANG")
+ }
+
+ // ignore C locale
+ if locale == "POSIX" || locale == "C" {
+ return false
+ }
+ if len(locale) > 1 && locale[0] == 'C' && (locale[1] == '.' || locale[1] == '-') {
+ return false
+ }
+
+ return isEastAsian(locale)
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_table.go b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
new file mode 100644
index 0000000000..9ca6d0e28b
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_table.go
@@ -0,0 +1,427 @@
+package runewidth
+
+var combining = table{
+ {0x0300, 0x036F}, {0x0483, 0x0489}, {0x07EB, 0x07F3},
+ {0x0C00, 0x0C00}, {0x0C04, 0x0C04}, {0x0D00, 0x0D01},
+ {0x135D, 0x135F}, {0x1A7F, 0x1A7F}, {0x1AB0, 0x1ABE},
+ {0x1B6B, 0x1B73}, {0x1DC0, 0x1DF9}, {0x1DFB, 0x1DFF},
+ {0x20D0, 0x20F0}, {0x2CEF, 0x2CF1}, {0x2DE0, 0x2DFF},
+ {0x3099, 0x309A}, {0xA66F, 0xA672}, {0xA674, 0xA67D},
+ {0xA69E, 0xA69F}, {0xA6F0, 0xA6F1}, {0xA8E0, 0xA8F1},
+ {0xFE20, 0xFE2F}, {0x101FD, 0x101FD}, {0x10376, 0x1037A},
+ {0x10F46, 0x10F50}, {0x11300, 0x11301}, {0x1133B, 0x1133C},
+ {0x11366, 0x1136C}, {0x11370, 0x11374}, {0x16AF0, 0x16AF4},
+ {0x1D165, 0x1D169}, {0x1D16D, 0x1D172}, {0x1D17B, 0x1D182},
+ {0x1D185, 0x1D18B}, {0x1D1AA, 0x1D1AD}, {0x1D242, 0x1D244},
+ {0x1E000, 0x1E006}, {0x1E008, 0x1E018}, {0x1E01B, 0x1E021},
+ {0x1E023, 0x1E024}, {0x1E026, 0x1E02A}, {0x1E8D0, 0x1E8D6},
+}
+
+var doublewidth = table{
+ {0x1100, 0x115F}, {0x231A, 0x231B}, {0x2329, 0x232A},
+ {0x23E9, 0x23EC}, {0x23F0, 0x23F0}, {0x23F3, 0x23F3},
+ {0x25FD, 0x25FE}, {0x2614, 0x2615}, {0x2648, 0x2653},
+ {0x267F, 0x267F}, {0x2693, 0x2693}, {0x26A1, 0x26A1},
+ {0x26AA, 0x26AB}, {0x26BD, 0x26BE}, {0x26C4, 0x26C5},
+ {0x26CE, 0x26CE}, {0x26D4, 0x26D4}, {0x26EA, 0x26EA},
+ {0x26F2, 0x26F3}, {0x26F5, 0x26F5}, {0x26FA, 0x26FA},
+ {0x26FD, 0x26FD}, {0x2705, 0x2705}, {0x270A, 0x270B},
+ {0x2728, 0x2728}, {0x274C, 0x274C}, {0x274E, 0x274E},
+ {0x2753, 0x2755}, {0x2757, 0x2757}, {0x2795, 0x2797},
+ {0x27B0, 0x27B0}, {0x27BF, 0x27BF}, {0x2B1B, 0x2B1C},
+ {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x2E80, 0x2E99},
+ {0x2E9B, 0x2EF3}, {0x2F00, 0x2FD5}, {0x2FF0, 0x2FFB},
+ {0x3000, 0x303E}, {0x3041, 0x3096}, {0x3099, 0x30FF},
+ {0x3105, 0x312F}, {0x3131, 0x318E}, {0x3190, 0x31BA},
+ {0x31C0, 0x31E3}, {0x31F0, 0x321E}, {0x3220, 0x3247},
+ {0x3250, 0x4DBF}, {0x4E00, 0xA48C}, {0xA490, 0xA4C6},
+ {0xA960, 0xA97C}, {0xAC00, 0xD7A3}, {0xF900, 0xFAFF},
+ {0xFE10, 0xFE19}, {0xFE30, 0xFE52}, {0xFE54, 0xFE66},
+ {0xFE68, 0xFE6B}, {0xFF01, 0xFF60}, {0xFFE0, 0xFFE6},
+ {0x16FE0, 0x16FE3}, {0x17000, 0x187F7}, {0x18800, 0x18AF2},
+ {0x1B000, 0x1B11E}, {0x1B150, 0x1B152}, {0x1B164, 0x1B167},
+ {0x1B170, 0x1B2FB}, {0x1F004, 0x1F004}, {0x1F0CF, 0x1F0CF},
+ {0x1F18E, 0x1F18E}, {0x1F191, 0x1F19A}, {0x1F200, 0x1F202},
+ {0x1F210, 0x1F23B}, {0x1F240, 0x1F248}, {0x1F250, 0x1F251},
+ {0x1F260, 0x1F265}, {0x1F300, 0x1F320}, {0x1F32D, 0x1F335},
+ {0x1F337, 0x1F37C}, {0x1F37E, 0x1F393}, {0x1F3A0, 0x1F3CA},
+ {0x1F3CF, 0x1F3D3}, {0x1F3E0, 0x1F3F0}, {0x1F3F4, 0x1F3F4},
+ {0x1F3F8, 0x1F43E}, {0x1F440, 0x1F440}, {0x1F442, 0x1F4FC},
+ {0x1F4FF, 0x1F53D}, {0x1F54B, 0x1F54E}, {0x1F550, 0x1F567},
+ {0x1F57A, 0x1F57A}, {0x1F595, 0x1F596}, {0x1F5A4, 0x1F5A4},
+ {0x1F5FB, 0x1F64F}, {0x1F680, 0x1F6C5}, {0x1F6CC, 0x1F6CC},
+ {0x1F6D0, 0x1F6D2}, {0x1F6D5, 0x1F6D5}, {0x1F6EB, 0x1F6EC},
+ {0x1F6F4, 0x1F6FA}, {0x1F7E0, 0x1F7EB}, {0x1F90D, 0x1F971},
+ {0x1F973, 0x1F976}, {0x1F97A, 0x1F9A2}, {0x1F9A5, 0x1F9AA},
+ {0x1F9AE, 0x1F9CA}, {0x1F9CD, 0x1F9FF}, {0x1FA70, 0x1FA73},
+ {0x1FA78, 0x1FA7A}, {0x1FA80, 0x1FA82}, {0x1FA90, 0x1FA95},
+ {0x20000, 0x2FFFD}, {0x30000, 0x3FFFD},
+}
+
+var ambiguous = table{
+ {0x00A1, 0x00A1}, {0x00A4, 0x00A4}, {0x00A7, 0x00A8},
+ {0x00AA, 0x00AA}, {0x00AD, 0x00AE}, {0x00B0, 0x00B4},
+ {0x00B6, 0x00BA}, {0x00BC, 0x00BF}, {0x00C6, 0x00C6},
+ {0x00D0, 0x00D0}, {0x00D7, 0x00D8}, {0x00DE, 0x00E1},
+ {0x00E6, 0x00E6}, {0x00E8, 0x00EA}, {0x00EC, 0x00ED},
+ {0x00F0, 0x00F0}, {0x00F2, 0x00F3}, {0x00F7, 0x00FA},
+ {0x00FC, 0x00FC}, {0x00FE, 0x00FE}, {0x0101, 0x0101},
+ {0x0111, 0x0111}, {0x0113, 0x0113}, {0x011B, 0x011B},
+ {0x0126, 0x0127}, {0x012B, 0x012B}, {0x0131, 0x0133},
+ {0x0138, 0x0138}, {0x013F, 0x0142}, {0x0144, 0x0144},
+ {0x0148, 0x014B}, {0x014D, 0x014D}, {0x0152, 0x0153},
+ {0x0166, 0x0167}, {0x016B, 0x016B}, {0x01CE, 0x01CE},
+ {0x01D0, 0x01D0}, {0x01D2, 0x01D2}, {0x01D4, 0x01D4},
+ {0x01D6, 0x01D6}, {0x01D8, 0x01D8}, {0x01DA, 0x01DA},
+ {0x01DC, 0x01DC}, {0x0251, 0x0251}, {0x0261, 0x0261},
+ {0x02C4, 0x02C4}, {0x02C7, 0x02C7}, {0x02C9, 0x02CB},
+ {0x02CD, 0x02CD}, {0x02D0, 0x02D0}, {0x02D8, 0x02DB},
+ {0x02DD, 0x02DD}, {0x02DF, 0x02DF}, {0x0300, 0x036F},
+ {0x0391, 0x03A1}, {0x03A3, 0x03A9}, {0x03B1, 0x03C1},
+ {0x03C3, 0x03C9}, {0x0401, 0x0401}, {0x0410, 0x044F},
+ {0x0451, 0x0451}, {0x2010, 0x2010}, {0x2013, 0x2016},
+ {0x2018, 0x2019}, {0x201C, 0x201D}, {0x2020, 0x2022},
+ {0x2024, 0x2027}, {0x2030, 0x2030}, {0x2032, 0x2033},
+ {0x2035, 0x2035}, {0x203B, 0x203B}, {0x203E, 0x203E},
+ {0x2074, 0x2074}, {0x207F, 0x207F}, {0x2081, 0x2084},
+ {0x20AC, 0x20AC}, {0x2103, 0x2103}, {0x2105, 0x2105},
+ {0x2109, 0x2109}, {0x2113, 0x2113}, {0x2116, 0x2116},
+ {0x2121, 0x2122}, {0x2126, 0x2126}, {0x212B, 0x212B},
+ {0x2153, 0x2154}, {0x215B, 0x215E}, {0x2160, 0x216B},
+ {0x2170, 0x2179}, {0x2189, 0x2189}, {0x2190, 0x2199},
+ {0x21B8, 0x21B9}, {0x21D2, 0x21D2}, {0x21D4, 0x21D4},
+ {0x21E7, 0x21E7}, {0x2200, 0x2200}, {0x2202, 0x2203},
+ {0x2207, 0x2208}, {0x220B, 0x220B}, {0x220F, 0x220F},
+ {0x2211, 0x2211}, {0x2215, 0x2215}, {0x221A, 0x221A},
+ {0x221D, 0x2220}, {0x2223, 0x2223}, {0x2225, 0x2225},
+ {0x2227, 0x222C}, {0x222E, 0x222E}, {0x2234, 0x2237},
+ {0x223C, 0x223D}, {0x2248, 0x2248}, {0x224C, 0x224C},
+ {0x2252, 0x2252}, {0x2260, 0x2261}, {0x2264, 0x2267},
+ {0x226A, 0x226B}, {0x226E, 0x226F}, {0x2282, 0x2283},
+ {0x2286, 0x2287}, {0x2295, 0x2295}, {0x2299, 0x2299},
+ {0x22A5, 0x22A5}, {0x22BF, 0x22BF}, {0x2312, 0x2312},
+ {0x2460, 0x24E9}, {0x24EB, 0x254B}, {0x2550, 0x2573},
+ {0x2580, 0x258F}, {0x2592, 0x2595}, {0x25A0, 0x25A1},
+ {0x25A3, 0x25A9}, {0x25B2, 0x25B3}, {0x25B6, 0x25B7},
+ {0x25BC, 0x25BD}, {0x25C0, 0x25C1}, {0x25C6, 0x25C8},
+ {0x25CB, 0x25CB}, {0x25CE, 0x25D1}, {0x25E2, 0x25E5},
+ {0x25EF, 0x25EF}, {0x2605, 0x2606}, {0x2609, 0x2609},
+ {0x260E, 0x260F}, {0x261C, 0x261C}, {0x261E, 0x261E},
+ {0x2640, 0x2640}, {0x2642, 0x2642}, {0x2660, 0x2661},
+ {0x2663, 0x2665}, {0x2667, 0x266A}, {0x266C, 0x266D},
+ {0x266F, 0x266F}, {0x269E, 0x269F}, {0x26BF, 0x26BF},
+ {0x26C6, 0x26CD}, {0x26CF, 0x26D3}, {0x26D5, 0x26E1},
+ {0x26E3, 0x26E3}, {0x26E8, 0x26E9}, {0x26EB, 0x26F1},
+ {0x26F4, 0x26F4}, {0x26F6, 0x26F9}, {0x26FB, 0x26FC},
+ {0x26FE, 0x26FF}, {0x273D, 0x273D}, {0x2776, 0x277F},
+ {0x2B56, 0x2B59}, {0x3248, 0x324F}, {0xE000, 0xF8FF},
+ {0xFE00, 0xFE0F}, {0xFFFD, 0xFFFD}, {0x1F100, 0x1F10A},
+ {0x1F110, 0x1F12D}, {0x1F130, 0x1F169}, {0x1F170, 0x1F18D},
+ {0x1F18F, 0x1F190}, {0x1F19B, 0x1F1AC}, {0xE0100, 0xE01EF},
+ {0xF0000, 0xFFFFD}, {0x100000, 0x10FFFD},
+}
+var notassigned = table{
+ {0x27E6, 0x27ED}, {0x2985, 0x2986},
+}
+
+var neutral = table{
+ {0x0000, 0x001F}, {0x007F, 0x00A0}, {0x00A9, 0x00A9},
+ {0x00AB, 0x00AB}, {0x00B5, 0x00B5}, {0x00BB, 0x00BB},
+ {0x00C0, 0x00C5}, {0x00C7, 0x00CF}, {0x00D1, 0x00D6},
+ {0x00D9, 0x00DD}, {0x00E2, 0x00E5}, {0x00E7, 0x00E7},
+ {0x00EB, 0x00EB}, {0x00EE, 0x00EF}, {0x00F1, 0x00F1},
+ {0x00F4, 0x00F6}, {0x00FB, 0x00FB}, {0x00FD, 0x00FD},
+ {0x00FF, 0x0100}, {0x0102, 0x0110}, {0x0112, 0x0112},
+ {0x0114, 0x011A}, {0x011C, 0x0125}, {0x0128, 0x012A},
+ {0x012C, 0x0130}, {0x0134, 0x0137}, {0x0139, 0x013E},
+ {0x0143, 0x0143}, {0x0145, 0x0147}, {0x014C, 0x014C},
+ {0x014E, 0x0151}, {0x0154, 0x0165}, {0x0168, 0x016A},
+ {0x016C, 0x01CD}, {0x01CF, 0x01CF}, {0x01D1, 0x01D1},
+ {0x01D3, 0x01D3}, {0x01D5, 0x01D5}, {0x01D7, 0x01D7},
+ {0x01D9, 0x01D9}, {0x01DB, 0x01DB}, {0x01DD, 0x0250},
+ {0x0252, 0x0260}, {0x0262, 0x02C3}, {0x02C5, 0x02C6},
+ {0x02C8, 0x02C8}, {0x02CC, 0x02CC}, {0x02CE, 0x02CF},
+ {0x02D1, 0x02D7}, {0x02DC, 0x02DC}, {0x02DE, 0x02DE},
+ {0x02E0, 0x02FF}, {0x0370, 0x0377}, {0x037A, 0x037F},
+ {0x0384, 0x038A}, {0x038C, 0x038C}, {0x038E, 0x0390},
+ {0x03AA, 0x03B0}, {0x03C2, 0x03C2}, {0x03CA, 0x0400},
+ {0x0402, 0x040F}, {0x0450, 0x0450}, {0x0452, 0x052F},
+ {0x0531, 0x0556}, {0x0559, 0x058A}, {0x058D, 0x058F},
+ {0x0591, 0x05C7}, {0x05D0, 0x05EA}, {0x05EF, 0x05F4},
+ {0x0600, 0x061C}, {0x061E, 0x070D}, {0x070F, 0x074A},
+ {0x074D, 0x07B1}, {0x07C0, 0x07FA}, {0x07FD, 0x082D},
+ {0x0830, 0x083E}, {0x0840, 0x085B}, {0x085E, 0x085E},
+ {0x0860, 0x086A}, {0x08A0, 0x08B4}, {0x08B6, 0x08BD},
+ {0x08D3, 0x0983}, {0x0985, 0x098C}, {0x098F, 0x0990},
+ {0x0993, 0x09A8}, {0x09AA, 0x09B0}, {0x09B2, 0x09B2},
+ {0x09B6, 0x09B9}, {0x09BC, 0x09C4}, {0x09C7, 0x09C8},
+ {0x09CB, 0x09CE}, {0x09D7, 0x09D7}, {0x09DC, 0x09DD},
+ {0x09DF, 0x09E3}, {0x09E6, 0x09FE}, {0x0A01, 0x0A03},
+ {0x0A05, 0x0A0A}, {0x0A0F, 0x0A10}, {0x0A13, 0x0A28},
+ {0x0A2A, 0x0A30}, {0x0A32, 0x0A33}, {0x0A35, 0x0A36},
+ {0x0A38, 0x0A39}, {0x0A3C, 0x0A3C}, {0x0A3E, 0x0A42},
+ {0x0A47, 0x0A48}, {0x0A4B, 0x0A4D}, {0x0A51, 0x0A51},
+ {0x0A59, 0x0A5C}, {0x0A5E, 0x0A5E}, {0x0A66, 0x0A76},
+ {0x0A81, 0x0A83}, {0x0A85, 0x0A8D}, {0x0A8F, 0x0A91},
+ {0x0A93, 0x0AA8}, {0x0AAA, 0x0AB0}, {0x0AB2, 0x0AB3},
+ {0x0AB5, 0x0AB9}, {0x0ABC, 0x0AC5}, {0x0AC7, 0x0AC9},
+ {0x0ACB, 0x0ACD}, {0x0AD0, 0x0AD0}, {0x0AE0, 0x0AE3},
+ {0x0AE6, 0x0AF1}, {0x0AF9, 0x0AFF}, {0x0B01, 0x0B03},
+ {0x0B05, 0x0B0C}, {0x0B0F, 0x0B10}, {0x0B13, 0x0B28},
+ {0x0B2A, 0x0B30}, {0x0B32, 0x0B33}, {0x0B35, 0x0B39},
+ {0x0B3C, 0x0B44}, {0x0B47, 0x0B48}, {0x0B4B, 0x0B4D},
+ {0x0B56, 0x0B57}, {0x0B5C, 0x0B5D}, {0x0B5F, 0x0B63},
+ {0x0B66, 0x0B77}, {0x0B82, 0x0B83}, {0x0B85, 0x0B8A},
+ {0x0B8E, 0x0B90}, {0x0B92, 0x0B95}, {0x0B99, 0x0B9A},
+ {0x0B9C, 0x0B9C}, {0x0B9E, 0x0B9F}, {0x0BA3, 0x0BA4},
+ {0x0BA8, 0x0BAA}, {0x0BAE, 0x0BB9}, {0x0BBE, 0x0BC2},
+ {0x0BC6, 0x0BC8}, {0x0BCA, 0x0BCD}, {0x0BD0, 0x0BD0},
+ {0x0BD7, 0x0BD7}, {0x0BE6, 0x0BFA}, {0x0C00, 0x0C0C},
+ {0x0C0E, 0x0C10}, {0x0C12, 0x0C28}, {0x0C2A, 0x0C39},
+ {0x0C3D, 0x0C44}, {0x0C46, 0x0C48}, {0x0C4A, 0x0C4D},
+ {0x0C55, 0x0C56}, {0x0C58, 0x0C5A}, {0x0C60, 0x0C63},
+ {0x0C66, 0x0C6F}, {0x0C77, 0x0C8C}, {0x0C8E, 0x0C90},
+ {0x0C92, 0x0CA8}, {0x0CAA, 0x0CB3}, {0x0CB5, 0x0CB9},
+ {0x0CBC, 0x0CC4}, {0x0CC6, 0x0CC8}, {0x0CCA, 0x0CCD},
+ {0x0CD5, 0x0CD6}, {0x0CDE, 0x0CDE}, {0x0CE0, 0x0CE3},
+ {0x0CE6, 0x0CEF}, {0x0CF1, 0x0CF2}, {0x0D00, 0x0D03},
+ {0x0D05, 0x0D0C}, {0x0D0E, 0x0D10}, {0x0D12, 0x0D44},
+ {0x0D46, 0x0D48}, {0x0D4A, 0x0D4F}, {0x0D54, 0x0D63},
+ {0x0D66, 0x0D7F}, {0x0D82, 0x0D83}, {0x0D85, 0x0D96},
+ {0x0D9A, 0x0DB1}, {0x0DB3, 0x0DBB}, {0x0DBD, 0x0DBD},
+ {0x0DC0, 0x0DC6}, {0x0DCA, 0x0DCA}, {0x0DCF, 0x0DD4},
+ {0x0DD6, 0x0DD6}, {0x0DD8, 0x0DDF}, {0x0DE6, 0x0DEF},
+ {0x0DF2, 0x0DF4}, {0x0E01, 0x0E3A}, {0x0E3F, 0x0E5B},
+ {0x0E81, 0x0E82}, {0x0E84, 0x0E84}, {0x0E86, 0x0E8A},
+ {0x0E8C, 0x0EA3}, {0x0EA5, 0x0EA5}, {0x0EA7, 0x0EBD},
+ {0x0EC0, 0x0EC4}, {0x0EC6, 0x0EC6}, {0x0EC8, 0x0ECD},
+ {0x0ED0, 0x0ED9}, {0x0EDC, 0x0EDF}, {0x0F00, 0x0F47},
+ {0x0F49, 0x0F6C}, {0x0F71, 0x0F97}, {0x0F99, 0x0FBC},
+ {0x0FBE, 0x0FCC}, {0x0FCE, 0x0FDA}, {0x1000, 0x10C5},
+ {0x10C7, 0x10C7}, {0x10CD, 0x10CD}, {0x10D0, 0x10FF},
+ {0x1160, 0x1248}, {0x124A, 0x124D}, {0x1250, 0x1256},
+ {0x1258, 0x1258}, {0x125A, 0x125D}, {0x1260, 0x1288},
+ {0x128A, 0x128D}, {0x1290, 0x12B0}, {0x12B2, 0x12B5},
+ {0x12B8, 0x12BE}, {0x12C0, 0x12C0}, {0x12C2, 0x12C5},
+ {0x12C8, 0x12D6}, {0x12D8, 0x1310}, {0x1312, 0x1315},
+ {0x1318, 0x135A}, {0x135D, 0x137C}, {0x1380, 0x1399},
+ {0x13A0, 0x13F5}, {0x13F8, 0x13FD}, {0x1400, 0x169C},
+ {0x16A0, 0x16F8}, {0x1700, 0x170C}, {0x170E, 0x1714},
+ {0x1720, 0x1736}, {0x1740, 0x1753}, {0x1760, 0x176C},
+ {0x176E, 0x1770}, {0x1772, 0x1773}, {0x1780, 0x17DD},
+ {0x17E0, 0x17E9}, {0x17F0, 0x17F9}, {0x1800, 0x180E},
+ {0x1810, 0x1819}, {0x1820, 0x1878}, {0x1880, 0x18AA},
+ {0x18B0, 0x18F5}, {0x1900, 0x191E}, {0x1920, 0x192B},
+ {0x1930, 0x193B}, {0x1940, 0x1940}, {0x1944, 0x196D},
+ {0x1970, 0x1974}, {0x1980, 0x19AB}, {0x19B0, 0x19C9},
+ {0x19D0, 0x19DA}, {0x19DE, 0x1A1B}, {0x1A1E, 0x1A5E},
+ {0x1A60, 0x1A7C}, {0x1A7F, 0x1A89}, {0x1A90, 0x1A99},
+ {0x1AA0, 0x1AAD}, {0x1AB0, 0x1ABE}, {0x1B00, 0x1B4B},
+ {0x1B50, 0x1B7C}, {0x1B80, 0x1BF3}, {0x1BFC, 0x1C37},
+ {0x1C3B, 0x1C49}, {0x1C4D, 0x1C88}, {0x1C90, 0x1CBA},
+ {0x1CBD, 0x1CC7}, {0x1CD0, 0x1CFA}, {0x1D00, 0x1DF9},
+ {0x1DFB, 0x1F15}, {0x1F18, 0x1F1D}, {0x1F20, 0x1F45},
+ {0x1F48, 0x1F4D}, {0x1F50, 0x1F57}, {0x1F59, 0x1F59},
+ {0x1F5B, 0x1F5B}, {0x1F5D, 0x1F5D}, {0x1F5F, 0x1F7D},
+ {0x1F80, 0x1FB4}, {0x1FB6, 0x1FC4}, {0x1FC6, 0x1FD3},
+ {0x1FD6, 0x1FDB}, {0x1FDD, 0x1FEF}, {0x1FF2, 0x1FF4},
+ {0x1FF6, 0x1FFE}, {0x2000, 0x200F}, {0x2011, 0x2012},
+ {0x2017, 0x2017}, {0x201A, 0x201B}, {0x201E, 0x201F},
+ {0x2023, 0x2023}, {0x2028, 0x202F}, {0x2031, 0x2031},
+ {0x2034, 0x2034}, {0x2036, 0x203A}, {0x203C, 0x203D},
+ {0x203F, 0x2064}, {0x2066, 0x2071}, {0x2075, 0x207E},
+ {0x2080, 0x2080}, {0x2085, 0x208E}, {0x2090, 0x209C},
+ {0x20A0, 0x20A8}, {0x20AA, 0x20AB}, {0x20AD, 0x20BF},
+ {0x20D0, 0x20F0}, {0x2100, 0x2102}, {0x2104, 0x2104},
+ {0x2106, 0x2108}, {0x210A, 0x2112}, {0x2114, 0x2115},
+ {0x2117, 0x2120}, {0x2123, 0x2125}, {0x2127, 0x212A},
+ {0x212C, 0x2152}, {0x2155, 0x215A}, {0x215F, 0x215F},
+ {0x216C, 0x216F}, {0x217A, 0x2188}, {0x218A, 0x218B},
+ {0x219A, 0x21B7}, {0x21BA, 0x21D1}, {0x21D3, 0x21D3},
+ {0x21D5, 0x21E6}, {0x21E8, 0x21FF}, {0x2201, 0x2201},
+ {0x2204, 0x2206}, {0x2209, 0x220A}, {0x220C, 0x220E},
+ {0x2210, 0x2210}, {0x2212, 0x2214}, {0x2216, 0x2219},
+ {0x221B, 0x221C}, {0x2221, 0x2222}, {0x2224, 0x2224},
+ {0x2226, 0x2226}, {0x222D, 0x222D}, {0x222F, 0x2233},
+ {0x2238, 0x223B}, {0x223E, 0x2247}, {0x2249, 0x224B},
+ {0x224D, 0x2251}, {0x2253, 0x225F}, {0x2262, 0x2263},
+ {0x2268, 0x2269}, {0x226C, 0x226D}, {0x2270, 0x2281},
+ {0x2284, 0x2285}, {0x2288, 0x2294}, {0x2296, 0x2298},
+ {0x229A, 0x22A4}, {0x22A6, 0x22BE}, {0x22C0, 0x2311},
+ {0x2313, 0x2319}, {0x231C, 0x2328}, {0x232B, 0x23E8},
+ {0x23ED, 0x23EF}, {0x23F1, 0x23F2}, {0x23F4, 0x2426},
+ {0x2440, 0x244A}, {0x24EA, 0x24EA}, {0x254C, 0x254F},
+ {0x2574, 0x257F}, {0x2590, 0x2591}, {0x2596, 0x259F},
+ {0x25A2, 0x25A2}, {0x25AA, 0x25B1}, {0x25B4, 0x25B5},
+ {0x25B8, 0x25BB}, {0x25BE, 0x25BF}, {0x25C2, 0x25C5},
+ {0x25C9, 0x25CA}, {0x25CC, 0x25CD}, {0x25D2, 0x25E1},
+ {0x25E6, 0x25EE}, {0x25F0, 0x25FC}, {0x25FF, 0x2604},
+ {0x2607, 0x2608}, {0x260A, 0x260D}, {0x2610, 0x2613},
+ {0x2616, 0x261B}, {0x261D, 0x261D}, {0x261F, 0x263F},
+ {0x2641, 0x2641}, {0x2643, 0x2647}, {0x2654, 0x265F},
+ {0x2662, 0x2662}, {0x2666, 0x2666}, {0x266B, 0x266B},
+ {0x266E, 0x266E}, {0x2670, 0x267E}, {0x2680, 0x2692},
+ {0x2694, 0x269D}, {0x26A0, 0x26A0}, {0x26A2, 0x26A9},
+ {0x26AC, 0x26BC}, {0x26C0, 0x26C3}, {0x26E2, 0x26E2},
+ {0x26E4, 0x26E7}, {0x2700, 0x2704}, {0x2706, 0x2709},
+ {0x270C, 0x2727}, {0x2729, 0x273C}, {0x273E, 0x274B},
+ {0x274D, 0x274D}, {0x274F, 0x2752}, {0x2756, 0x2756},
+ {0x2758, 0x2775}, {0x2780, 0x2794}, {0x2798, 0x27AF},
+ {0x27B1, 0x27BE}, {0x27C0, 0x27E5}, {0x27EE, 0x2984},
+ {0x2987, 0x2B1A}, {0x2B1D, 0x2B4F}, {0x2B51, 0x2B54},
+ {0x2B5A, 0x2B73}, {0x2B76, 0x2B95}, {0x2B98, 0x2C2E},
+ {0x2C30, 0x2C5E}, {0x2C60, 0x2CF3}, {0x2CF9, 0x2D25},
+ {0x2D27, 0x2D27}, {0x2D2D, 0x2D2D}, {0x2D30, 0x2D67},
+ {0x2D6F, 0x2D70}, {0x2D7F, 0x2D96}, {0x2DA0, 0x2DA6},
+ {0x2DA8, 0x2DAE}, {0x2DB0, 0x2DB6}, {0x2DB8, 0x2DBE},
+ {0x2DC0, 0x2DC6}, {0x2DC8, 0x2DCE}, {0x2DD0, 0x2DD6},
+ {0x2DD8, 0x2DDE}, {0x2DE0, 0x2E4F}, {0x303F, 0x303F},
+ {0x4DC0, 0x4DFF}, {0xA4D0, 0xA62B}, {0xA640, 0xA6F7},
+ {0xA700, 0xA7BF}, {0xA7C2, 0xA7C6}, {0xA7F7, 0xA82B},
+ {0xA830, 0xA839}, {0xA840, 0xA877}, {0xA880, 0xA8C5},
+ {0xA8CE, 0xA8D9}, {0xA8E0, 0xA953}, {0xA95F, 0xA95F},
+ {0xA980, 0xA9CD}, {0xA9CF, 0xA9D9}, {0xA9DE, 0xA9FE},
+ {0xAA00, 0xAA36}, {0xAA40, 0xAA4D}, {0xAA50, 0xAA59},
+ {0xAA5C, 0xAAC2}, {0xAADB, 0xAAF6}, {0xAB01, 0xAB06},
+ {0xAB09, 0xAB0E}, {0xAB11, 0xAB16}, {0xAB20, 0xAB26},
+ {0xAB28, 0xAB2E}, {0xAB30, 0xAB67}, {0xAB70, 0xABED},
+ {0xABF0, 0xABF9}, {0xD7B0, 0xD7C6}, {0xD7CB, 0xD7FB},
+ {0xD800, 0xDFFF}, {0xFB00, 0xFB06}, {0xFB13, 0xFB17},
+ {0xFB1D, 0xFB36}, {0xFB38, 0xFB3C}, {0xFB3E, 0xFB3E},
+ {0xFB40, 0xFB41}, {0xFB43, 0xFB44}, {0xFB46, 0xFBC1},
+ {0xFBD3, 0xFD3F}, {0xFD50, 0xFD8F}, {0xFD92, 0xFDC7},
+ {0xFDF0, 0xFDFD}, {0xFE20, 0xFE2F}, {0xFE70, 0xFE74},
+ {0xFE76, 0xFEFC}, {0xFEFF, 0xFEFF}, {0xFFF9, 0xFFFC},
+ {0x10000, 0x1000B}, {0x1000D, 0x10026}, {0x10028, 0x1003A},
+ {0x1003C, 0x1003D}, {0x1003F, 0x1004D}, {0x10050, 0x1005D},
+ {0x10080, 0x100FA}, {0x10100, 0x10102}, {0x10107, 0x10133},
+ {0x10137, 0x1018E}, {0x10190, 0x1019B}, {0x101A0, 0x101A0},
+ {0x101D0, 0x101FD}, {0x10280, 0x1029C}, {0x102A0, 0x102D0},
+ {0x102E0, 0x102FB}, {0x10300, 0x10323}, {0x1032D, 0x1034A},
+ {0x10350, 0x1037A}, {0x10380, 0x1039D}, {0x1039F, 0x103C3},
+ {0x103C8, 0x103D5}, {0x10400, 0x1049D}, {0x104A0, 0x104A9},
+ {0x104B0, 0x104D3}, {0x104D8, 0x104FB}, {0x10500, 0x10527},
+ {0x10530, 0x10563}, {0x1056F, 0x1056F}, {0x10600, 0x10736},
+ {0x10740, 0x10755}, {0x10760, 0x10767}, {0x10800, 0x10805},
+ {0x10808, 0x10808}, {0x1080A, 0x10835}, {0x10837, 0x10838},
+ {0x1083C, 0x1083C}, {0x1083F, 0x10855}, {0x10857, 0x1089E},
+ {0x108A7, 0x108AF}, {0x108E0, 0x108F2}, {0x108F4, 0x108F5},
+ {0x108FB, 0x1091B}, {0x1091F, 0x10939}, {0x1093F, 0x1093F},
+ {0x10980, 0x109B7}, {0x109BC, 0x109CF}, {0x109D2, 0x10A03},
+ {0x10A05, 0x10A06}, {0x10A0C, 0x10A13}, {0x10A15, 0x10A17},
+ {0x10A19, 0x10A35}, {0x10A38, 0x10A3A}, {0x10A3F, 0x10A48},
+ {0x10A50, 0x10A58}, {0x10A60, 0x10A9F}, {0x10AC0, 0x10AE6},
+ {0x10AEB, 0x10AF6}, {0x10B00, 0x10B35}, {0x10B39, 0x10B55},
+ {0x10B58, 0x10B72}, {0x10B78, 0x10B91}, {0x10B99, 0x10B9C},
+ {0x10BA9, 0x10BAF}, {0x10C00, 0x10C48}, {0x10C80, 0x10CB2},
+ {0x10CC0, 0x10CF2}, {0x10CFA, 0x10D27}, {0x10D30, 0x10D39},
+ {0x10E60, 0x10E7E}, {0x10F00, 0x10F27}, {0x10F30, 0x10F59},
+ {0x10FE0, 0x10FF6}, {0x11000, 0x1104D}, {0x11052, 0x1106F},
+ {0x1107F, 0x110C1}, {0x110CD, 0x110CD}, {0x110D0, 0x110E8},
+ {0x110F0, 0x110F9}, {0x11100, 0x11134}, {0x11136, 0x11146},
+ {0x11150, 0x11176}, {0x11180, 0x111CD}, {0x111D0, 0x111DF},
+ {0x111E1, 0x111F4}, {0x11200, 0x11211}, {0x11213, 0x1123E},
+ {0x11280, 0x11286}, {0x11288, 0x11288}, {0x1128A, 0x1128D},
+ {0x1128F, 0x1129D}, {0x1129F, 0x112A9}, {0x112B0, 0x112EA},
+ {0x112F0, 0x112F9}, {0x11300, 0x11303}, {0x11305, 0x1130C},
+ {0x1130F, 0x11310}, {0x11313, 0x11328}, {0x1132A, 0x11330},
+ {0x11332, 0x11333}, {0x11335, 0x11339}, {0x1133B, 0x11344},
+ {0x11347, 0x11348}, {0x1134B, 0x1134D}, {0x11350, 0x11350},
+ {0x11357, 0x11357}, {0x1135D, 0x11363}, {0x11366, 0x1136C},
+ {0x11370, 0x11374}, {0x11400, 0x11459}, {0x1145B, 0x1145B},
+ {0x1145D, 0x1145F}, {0x11480, 0x114C7}, {0x114D0, 0x114D9},
+ {0x11580, 0x115B5}, {0x115B8, 0x115DD}, {0x11600, 0x11644},
+ {0x11650, 0x11659}, {0x11660, 0x1166C}, {0x11680, 0x116B8},
+ {0x116C0, 0x116C9}, {0x11700, 0x1171A}, {0x1171D, 0x1172B},
+ {0x11730, 0x1173F}, {0x11800, 0x1183B}, {0x118A0, 0x118F2},
+ {0x118FF, 0x118FF}, {0x119A0, 0x119A7}, {0x119AA, 0x119D7},
+ {0x119DA, 0x119E4}, {0x11A00, 0x11A47}, {0x11A50, 0x11AA2},
+ {0x11AC0, 0x11AF8}, {0x11C00, 0x11C08}, {0x11C0A, 0x11C36},
+ {0x11C38, 0x11C45}, {0x11C50, 0x11C6C}, {0x11C70, 0x11C8F},
+ {0x11C92, 0x11CA7}, {0x11CA9, 0x11CB6}, {0x11D00, 0x11D06},
+ {0x11D08, 0x11D09}, {0x11D0B, 0x11D36}, {0x11D3A, 0x11D3A},
+ {0x11D3C, 0x11D3D}, {0x11D3F, 0x11D47}, {0x11D50, 0x11D59},
+ {0x11D60, 0x11D65}, {0x11D67, 0x11D68}, {0x11D6A, 0x11D8E},
+ {0x11D90, 0x11D91}, {0x11D93, 0x11D98}, {0x11DA0, 0x11DA9},
+ {0x11EE0, 0x11EF8}, {0x11FC0, 0x11FF1}, {0x11FFF, 0x12399},
+ {0x12400, 0x1246E}, {0x12470, 0x12474}, {0x12480, 0x12543},
+ {0x13000, 0x1342E}, {0x13430, 0x13438}, {0x14400, 0x14646},
+ {0x16800, 0x16A38}, {0x16A40, 0x16A5E}, {0x16A60, 0x16A69},
+ {0x16A6E, 0x16A6F}, {0x16AD0, 0x16AED}, {0x16AF0, 0x16AF5},
+ {0x16B00, 0x16B45}, {0x16B50, 0x16B59}, {0x16B5B, 0x16B61},
+ {0x16B63, 0x16B77}, {0x16B7D, 0x16B8F}, {0x16E40, 0x16E9A},
+ {0x16F00, 0x16F4A}, {0x16F4F, 0x16F87}, {0x16F8F, 0x16F9F},
+ {0x1BC00, 0x1BC6A}, {0x1BC70, 0x1BC7C}, {0x1BC80, 0x1BC88},
+ {0x1BC90, 0x1BC99}, {0x1BC9C, 0x1BCA3}, {0x1D000, 0x1D0F5},
+ {0x1D100, 0x1D126}, {0x1D129, 0x1D1E8}, {0x1D200, 0x1D245},
+ {0x1D2E0, 0x1D2F3}, {0x1D300, 0x1D356}, {0x1D360, 0x1D378},
+ {0x1D400, 0x1D454}, {0x1D456, 0x1D49C}, {0x1D49E, 0x1D49F},
+ {0x1D4A2, 0x1D4A2}, {0x1D4A5, 0x1D4A6}, {0x1D4A9, 0x1D4AC},
+ {0x1D4AE, 0x1D4B9}, {0x1D4BB, 0x1D4BB}, {0x1D4BD, 0x1D4C3},
+ {0x1D4C5, 0x1D505}, {0x1D507, 0x1D50A}, {0x1D50D, 0x1D514},
+ {0x1D516, 0x1D51C}, {0x1D51E, 0x1D539}, {0x1D53B, 0x1D53E},
+ {0x1D540, 0x1D544}, {0x1D546, 0x1D546}, {0x1D54A, 0x1D550},
+ {0x1D552, 0x1D6A5}, {0x1D6A8, 0x1D7CB}, {0x1D7CE, 0x1DA8B},
+ {0x1DA9B, 0x1DA9F}, {0x1DAA1, 0x1DAAF}, {0x1E000, 0x1E006},
+ {0x1E008, 0x1E018}, {0x1E01B, 0x1E021}, {0x1E023, 0x1E024},
+ {0x1E026, 0x1E02A}, {0x1E100, 0x1E12C}, {0x1E130, 0x1E13D},
+ {0x1E140, 0x1E149}, {0x1E14E, 0x1E14F}, {0x1E2C0, 0x1E2F9},
+ {0x1E2FF, 0x1E2FF}, {0x1E800, 0x1E8C4}, {0x1E8C7, 0x1E8D6},
+ {0x1E900, 0x1E94B}, {0x1E950, 0x1E959}, {0x1E95E, 0x1E95F},
+ {0x1EC71, 0x1ECB4}, {0x1ED01, 0x1ED3D}, {0x1EE00, 0x1EE03},
+ {0x1EE05, 0x1EE1F}, {0x1EE21, 0x1EE22}, {0x1EE24, 0x1EE24},
+ {0x1EE27, 0x1EE27}, {0x1EE29, 0x1EE32}, {0x1EE34, 0x1EE37},
+ {0x1EE39, 0x1EE39}, {0x1EE3B, 0x1EE3B}, {0x1EE42, 0x1EE42},
+ {0x1EE47, 0x1EE47}, {0x1EE49, 0x1EE49}, {0x1EE4B, 0x1EE4B},
+ {0x1EE4D, 0x1EE4F}, {0x1EE51, 0x1EE52}, {0x1EE54, 0x1EE54},
+ {0x1EE57, 0x1EE57}, {0x1EE59, 0x1EE59}, {0x1EE5B, 0x1EE5B},
+ {0x1EE5D, 0x1EE5D}, {0x1EE5F, 0x1EE5F}, {0x1EE61, 0x1EE62},
+ {0x1EE64, 0x1EE64}, {0x1EE67, 0x1EE6A}, {0x1EE6C, 0x1EE72},
+ {0x1EE74, 0x1EE77}, {0x1EE79, 0x1EE7C}, {0x1EE7E, 0x1EE7E},
+ {0x1EE80, 0x1EE89}, {0x1EE8B, 0x1EE9B}, {0x1EEA1, 0x1EEA3},
+ {0x1EEA5, 0x1EEA9}, {0x1EEAB, 0x1EEBB}, {0x1EEF0, 0x1EEF1},
+ {0x1F000, 0x1F003}, {0x1F005, 0x1F02B}, {0x1F030, 0x1F093},
+ {0x1F0A0, 0x1F0AE}, {0x1F0B1, 0x1F0BF}, {0x1F0C1, 0x1F0CE},
+ {0x1F0D1, 0x1F0F5}, {0x1F10B, 0x1F10C}, {0x1F12E, 0x1F12F},
+ {0x1F16A, 0x1F16C}, {0x1F1E6, 0x1F1FF}, {0x1F321, 0x1F32C},
+ {0x1F336, 0x1F336}, {0x1F37D, 0x1F37D}, {0x1F394, 0x1F39F},
+ {0x1F3CB, 0x1F3CE}, {0x1F3D4, 0x1F3DF}, {0x1F3F1, 0x1F3F3},
+ {0x1F3F5, 0x1F3F7}, {0x1F43F, 0x1F43F}, {0x1F441, 0x1F441},
+ {0x1F4FD, 0x1F4FE}, {0x1F53E, 0x1F54A}, {0x1F54F, 0x1F54F},
+ {0x1F568, 0x1F579}, {0x1F57B, 0x1F594}, {0x1F597, 0x1F5A3},
+ {0x1F5A5, 0x1F5FA}, {0x1F650, 0x1F67F}, {0x1F6C6, 0x1F6CB},
+ {0x1F6CD, 0x1F6CF}, {0x1F6D3, 0x1F6D4}, {0x1F6E0, 0x1F6EA},
+ {0x1F6F0, 0x1F6F3}, {0x1F700, 0x1F773}, {0x1F780, 0x1F7D8},
+ {0x1F800, 0x1F80B}, {0x1F810, 0x1F847}, {0x1F850, 0x1F859},
+ {0x1F860, 0x1F887}, {0x1F890, 0x1F8AD}, {0x1F900, 0x1F90B},
+ {0x1FA00, 0x1FA53}, {0x1FA60, 0x1FA6D}, {0xE0001, 0xE0001},
+ {0xE0020, 0xE007F},
+}
+
+var emoji = table{
+ {0x203C, 0x203C}, {0x2049, 0x2049}, {0x2122, 0x2122},
+ {0x2139, 0x2139}, {0x2194, 0x2199}, {0x21A9, 0x21AA},
+ {0x231A, 0x231B}, {0x2328, 0x2328}, {0x2388, 0x2388},
+ {0x23CF, 0x23CF}, {0x23E9, 0x23F3}, {0x23F8, 0x23FA},
+ {0x24C2, 0x24C2}, {0x25AA, 0x25AB}, {0x25B6, 0x25B6},
+ {0x25C0, 0x25C0}, {0x25FB, 0x25FE}, {0x2600, 0x2605},
+ {0x2607, 0x2612}, {0x2614, 0x2685}, {0x2690, 0x2705},
+ {0x2708, 0x2712}, {0x2714, 0x2714}, {0x2716, 0x2716},
+ {0x271D, 0x271D}, {0x2721, 0x2721}, {0x2728, 0x2728},
+ {0x2733, 0x2734}, {0x2744, 0x2744}, {0x2747, 0x2747},
+ {0x274C, 0x274C}, {0x274E, 0x274E}, {0x2753, 0x2755},
+ {0x2757, 0x2757}, {0x2763, 0x2767}, {0x2795, 0x2797},
+ {0x27A1, 0x27A1}, {0x27B0, 0x27B0}, {0x27BF, 0x27BF},
+ {0x2934, 0x2935}, {0x2B05, 0x2B07}, {0x2B1B, 0x2B1C},
+ {0x2B50, 0x2B50}, {0x2B55, 0x2B55}, {0x3030, 0x3030},
+ {0x303D, 0x303D}, {0x3297, 0x3297}, {0x3299, 0x3299},
+ {0x1F000, 0x1F0FF}, {0x1F10D, 0x1F10F}, {0x1F12F, 0x1F12F},
+ {0x1F16C, 0x1F171}, {0x1F17E, 0x1F17F}, {0x1F18E, 0x1F18E},
+ {0x1F191, 0x1F19A}, {0x1F1AD, 0x1F1E5}, {0x1F201, 0x1F20F},
+ {0x1F21A, 0x1F21A}, {0x1F22F, 0x1F22F}, {0x1F232, 0x1F23A},
+ {0x1F23C, 0x1F23F}, {0x1F249, 0x1F3FA}, {0x1F400, 0x1F53D},
+ {0x1F546, 0x1F64F}, {0x1F680, 0x1F6FF}, {0x1F774, 0x1F77F},
+ {0x1F7D5, 0x1F7FF}, {0x1F80C, 0x1F80F}, {0x1F848, 0x1F84F},
+ {0x1F85A, 0x1F85F}, {0x1F888, 0x1F88F}, {0x1F8AE, 0x1F8FF},
+ {0x1F90C, 0x1F93A}, {0x1F93C, 0x1F945}, {0x1F947, 0x1FFFD},
+}
diff --git a/vendor/github.com/mattn/go-runewidth/runewidth_windows.go b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
new file mode 100644
index 0000000000..d6a61777d7
--- /dev/null
+++ b/vendor/github.com/mattn/go-runewidth/runewidth_windows.go
@@ -0,0 +1,28 @@
+// +build windows
+// +build !appengine
+
+package runewidth
+
+import (
+ "syscall"
+)
+
+var (
+ kernel32 = syscall.NewLazyDLL("kernel32")
+ procGetConsoleOutputCP = kernel32.NewProc("GetConsoleOutputCP")
+)
+
+// IsEastAsian returns true if the current locale is CJK
+func IsEastAsian() bool {
+ r1, _, _ := procGetConsoleOutputCP.Call()
+ if r1 == 0 {
+ return false
+ }
+
+ switch int(r1) {
+ case 932, 51932, 936, 949, 950:
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/mgechev/dots/.travis.yml b/vendor/github.com/mgechev/dots/.travis.yml
new file mode 100644
index 0000000000..f4a4a7363c
--- /dev/null
+++ b/vendor/github.com/mgechev/dots/.travis.yml
@@ -0,0 +1,2 @@
+language: go
+go: master
diff --git a/vendor/github.com/mgechev/dots/LICENSE b/vendor/github.com/mgechev/dots/LICENSE
new file mode 100644
index 0000000000..c617c7e012
--- /dev/null
+++ b/vendor/github.com/mgechev/dots/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Minko Gechev
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mgechev/dots/README.md b/vendor/github.com/mgechev/dots/README.md
new file mode 100644
index 0000000000..1203aef5f7
--- /dev/null
+++ b/vendor/github.com/mgechev/dots/README.md
@@ -0,0 +1,100 @@
+[![Build Status](https://travis-ci.org/mgechev/dots.svg?branch=master)](https://travis-ci.org/mgechev/dots)
+
+# Dots
+
+Implements the wildcard file matching in Go that is used by golint, go test, etc.
+
+## Usage
+
+```go
+import (
+	"fmt"
+	"log"
+
+	"github.com/mgechev/dots"
+)
+
+func main() {
+	result, err := dots.Resolve([]string{"./fixtures/..."}, []string{"./fixtures/foo"})
+	if err != nil {
+		log.Fatal(err)
+	}
+	for _, f := range result {
+		fmt.Println(f)
+	}
+}
+```
+
+If we suppose that we have the following directory structure:
+
+```text
+├── README.md
+├── fixtures
+│   ├── bar
+│   │   ├── bar1.go
+│   │   └── bar2.go
+│   ├── baz
+│   │   ├── baz1.go
+│   │   ├── baz2.go
+│   │   └── baz3.go
+│   └── foo
+│   ├── foo1.go
+│   ├── foo2.go
+│   └── foo3.go
+└── main.go
+```
+
+The result will be:
+
+```text
+fixtures/bar/bar1.go
+fixtures/bar/bar2.go
+fixtures/baz/baz1.go
+fixtures/baz/baz2.go
+fixtures/baz/baz3.go
+```
+
+`dots` supports wildcards in both the first and the last argument of `Resolve`, which means that you can ignore files based on a wildcard:
+
+```go
+dots.Resolve([]string{"github.com/mgechev/dots"}, []string{"./..."}) // empty list
+dots.Resolve([]string{"./fixtures/bar/..."}, []string{"./fixture/foo/...", "./fixtures/baz/..."}) // bar1.go, bar2.go
+```
+
+## Preserve package structure
+
+`dots` allows you to receive a slice of slices, where each nested slice represents an individual package:
+
+```go
+dots.ResolvePackages([]string{"github.com/mgechev/dots/..."}, []string{})
+```
+
+The result will be:
+
+```text
+[
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar1.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/bar/bar2.go"
+ ],
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz1.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz2.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/baz/baz3.go"
+ ],
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo1.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo2.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/dummy/foo/foo3.go"
+ ],
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz1.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/baz/baz2.go"
+ ],
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo1.go",
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/foo2.go"
+ ],
+ [
+ "$GOROOT/src/github.com/mgechev/dots/fixtures/pkg/foo/bar/bar1.go"
+ ]
+]
+```
+
+This method is especially useful when you want to perform type checking over a given package from the result.
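+
+A minimal sketch of that workflow (the `./vendor/...` skip pattern and the error handling are illustrative additions, not part of the upstream examples):
+
+```go
+package main
+
+import (
+	"fmt"
+	"log"
+
+	"github.com/mgechev/dots"
+)
+
+func main() {
+	pkgs, err := dots.ResolvePackages([]string{"./..."}, []string{"./vendor/..."})
+	if err != nil {
+		log.Fatal(err)
+	}
+	for _, files := range pkgs {
+		// Each nested slice holds the Go files of one package.
+		fmt.Println(files)
+	}
+}
+```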
+
+## License
+
+MIT
diff --git a/vendor/github.com/mgechev/dots/resolve.go b/vendor/github.com/mgechev/dots/resolve.go
new file mode 100644
index 0000000000..309ba18ad2
--- /dev/null
+++ b/vendor/github.com/mgechev/dots/resolve.go
@@ -0,0 +1,456 @@
+package dots
+
+import (
+ "go/build"
+ "log"
+ "os"
+ "path"
+ "path/filepath"
+ "regexp"
+ "runtime"
+ "strings"
+)
+
+var (
+ buildContext = build.Default
+ goroot = filepath.Clean(runtime.GOROOT())
+ gorootSrc = filepath.Join(goroot, "src")
+)
+
+func flatten(arr [][]string) []string {
+ var res []string
+ for _, e := range arr {
+ res = append(res, e...)
+ }
+ return res
+}
+
+// Resolve accepts a slice of paths with an optional "..." placeholder and a slice of paths to be skipped.
+// The final result is the set of all files from the selected directories minus
+// the files in the skip slice.
+func Resolve(includePatterns, skipPatterns []string) ([]string, error) {
+ skip, err := resolvePatterns(skipPatterns)
+ filter := newPathFilter(flatten(skip))
+ if err != nil {
+ return nil, err
+ }
+
+ pathSet := map[string]bool{}
+ includePackages, err := resolvePatterns(includePatterns)
+ include := flatten(includePackages)
+ if err != nil {
+ return nil, err
+ }
+
+ var result []string
+ for _, i := range include {
+ if _, ok := pathSet[i]; !ok && !filter(i) {
+ pathSet[i] = true
+ result = append(result, i)
+ }
+ }
+ return result, err
+}
+
+// ResolvePackages accepts a slice of paths with an optional "..." placeholder and a slice of paths to be skipped.
+// The final result is the set of all files from the selected directories minus
+// the files in the skip slice. The difference between `Resolve` and `ResolvePackages`
+// is that `ResolvePackages` preserves the package structure in the nested slices.
+func ResolvePackages(includePatterns, skipPatterns []string) ([][]string, error) {
+ skip, err := resolvePatterns(skipPatterns)
+ filter := newPathFilter(flatten(skip))
+ if err != nil {
+ return nil, err
+ }
+
+ pathSet := map[string]bool{}
+ include, err := resolvePatterns(includePatterns)
+ if err != nil {
+ return nil, err
+ }
+
+ var result [][]string
+ for _, p := range include {
+ var packageFiles []string
+ for _, f := range p {
+ if _, ok := pathSet[f]; !ok && !filter(f) {
+ pathSet[f] = true
+ packageFiles = append(packageFiles, f)
+ }
+ }
+ result = append(result, packageFiles)
+ }
+ return result, err
+}
+
+func isDir(filename string) bool {
+ fi, err := os.Stat(filename)
+ return err == nil && fi.IsDir()
+}
+
+func exists(filename string) bool {
+ _, err := os.Stat(filename)
+ return err == nil
+}
+
+func resolveDir(dirname string) ([]string, error) {
+ pkg, err := build.ImportDir(dirname, 0)
+ return resolveImportedPackage(pkg, err)
+}
+
+func resolvePackage(pkgname string) ([]string, error) {
+ pkg, err := build.Import(pkgname, ".", 0)
+ return resolveImportedPackage(pkg, err)
+}
+
+func resolveImportedPackage(pkg *build.Package, err error) ([]string, error) {
+ if err != nil {
+ if _, nogo := err.(*build.NoGoError); nogo {
+ // Don't complain if the failure is due to no Go source files.
+ return nil, nil
+ }
+ return nil, err
+ }
+
+ var files []string
+ files = append(files, pkg.GoFiles...)
+ files = append(files, pkg.CgoFiles...)
+ files = append(files, pkg.TestGoFiles...)
+ if pkg.Dir != "." {
+ for i, f := range files {
+ files[i] = filepath.Join(pkg.Dir, f)
+ }
+ }
+ return files, nil
+}
+
+func resolvePatterns(patterns []string) ([][]string, error) {
+ var files [][]string
+ for _, pattern := range patterns {
+ f, err := resolvePattern(pattern)
+ if err != nil {
+ return nil, err
+ }
+ files = append(files, f...)
+ }
+ return files, nil
+}
+
+func resolvePattern(pattern string) ([][]string, error) {
+ // dirsRun, filesRun, and pkgsRun indicate whether golint is applied to
+ // directory, file or package targets. The distinction affects which
+ // checks are run. It is not valid to mix target types.
+ var dirsRun, filesRun, pkgsRun int
+ var matches []string
+
+ if strings.HasSuffix(pattern, "/...") && isDir(pattern[:len(pattern)-len("/...")]) {
+ dirsRun = 1
+ for _, dirname := range matchPackagesInFS(pattern) {
+ matches = append(matches, dirname)
+ }
+ } else if isDir(pattern) {
+ dirsRun = 1
+ matches = append(matches, pattern)
+ } else if exists(pattern) {
+ filesRun = 1
+ matches = append(matches, pattern)
+ } else {
+ pkgsRun = 1
+ matches = append(matches, pattern)
+ }
+
+ result := [][]string{}
+ switch {
+ case dirsRun == 1:
+ for _, dir := range matches {
+ res, err := resolveDir(dir)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, res)
+ }
+ case filesRun == 1:
+ return [][]string{matches}, nil
+ case pkgsRun == 1:
+ for _, pkg := range importPaths(matches) {
+ res, err := resolvePackage(pkg)
+ if err != nil {
+ return nil, err
+ }
+ result = append(result, res)
+ }
+ }
+ return result, nil
+}
+
+func newPathFilter(skip []string) func(string) bool {
+ filter := map[string]bool{}
+ for _, name := range skip {
+ filter[name] = true
+ }
+
+ return func(path string) bool {
+ base := filepath.Base(path)
+ if filter[base] || filter[path] {
+ return true
+ }
+ return base != "." && base != ".." && strings.ContainsAny(base[0:1], "_.")
+ }
+}
+
+// importPathsNoDotExpansion returns the import paths to use for the given
+// command line, but it does no ... expansion.
+func importPathsNoDotExpansion(args []string) []string {
+ if len(args) == 0 {
+ return []string{"."}
+ }
+ var out []string
+ for _, a := range args {
+ // Arguments are supposed to be import paths, but
+ // as a courtesy to Windows developers, rewrite \ to /
+ // in command-line arguments. Handles .\... and so on.
+ if filepath.Separator == '\\' {
+ a = strings.Replace(a, `\`, `/`, -1)
+ }
+
+ // Put argument in canonical form, but preserve leading ./.
+ if strings.HasPrefix(a, "./") {
+ a = "./" + path.Clean(a)
+ if a == "./." {
+ a = "."
+ }
+ } else {
+ a = path.Clean(a)
+ }
+ if a == "all" || a == "std" {
+ out = append(out, matchPackages(a)...)
+ continue
+ }
+ out = append(out, a)
+ }
+ return out
+}
+
+// importPaths returns the import paths to use for the given command line.
+func importPaths(args []string) []string {
+ args = importPathsNoDotExpansion(args)
+ var out []string
+ for _, a := range args {
+ if strings.Contains(a, "...") {
+ if build.IsLocalImport(a) {
+ out = append(out, matchPackagesInFS(a)...)
+ } else {
+ out = append(out, matchPackages(a)...)
+ }
+ continue
+ }
+ out = append(out, a)
+ }
+ return out
+}
+
+// matchPattern(pattern)(name) reports whether
+// name matches pattern. Pattern is a limited glob
+// pattern in which '...' means 'any string' and there
+// is no other special syntax.
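+// For example, matchPattern("foo/...") matches both "foo" and "foo/bar/baz".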
+func matchPattern(pattern string) func(name string) bool {
+ re := regexp.QuoteMeta(pattern)
+ re = strings.Replace(re, `\.\.\.`, `.*`, -1)
+ // Special case: foo/... matches foo too.
+ if strings.HasSuffix(re, `/.*`) {
+ re = re[:len(re)-len(`/.*`)] + `(/.*)?`
+ }
+ reg := regexp.MustCompile(`^` + re + `$`)
+ return func(name string) bool {
+ return reg.MatchString(name)
+ }
+}
+
+// hasPathPrefix reports whether the path s begins with the
+// elements in prefix.
+func hasPathPrefix(s, prefix string) bool {
+ switch {
+ default:
+ return false
+ case len(s) == len(prefix):
+ return s == prefix
+ case len(s) > len(prefix):
+ if prefix != "" && prefix[len(prefix)-1] == '/' {
+ return strings.HasPrefix(s, prefix)
+ }
+ return s[len(prefix)] == '/' && s[:len(prefix)] == prefix
+ }
+}
+
+// treeCanMatchPattern(pattern)(name) reports whether
+// name or children of name can possibly match pattern.
+// Pattern is the same limited glob accepted by matchPattern.
+func treeCanMatchPattern(pattern string) func(name string) bool {
+ wildCard := false
+ if i := strings.Index(pattern, "..."); i >= 0 {
+ wildCard = true
+ pattern = pattern[:i]
+ }
+ return func(name string) bool {
+ return len(name) <= len(pattern) && hasPathPrefix(pattern, name) ||
+ wildCard && strings.HasPrefix(name, pattern)
+ }
+}
+
+func matchPackages(pattern string) []string {
+ match := func(string) bool { return true }
+ treeCanMatch := func(string) bool { return true }
+ if pattern != "all" && pattern != "std" {
+ match = matchPattern(pattern)
+ treeCanMatch = treeCanMatchPattern(pattern)
+ }
+
+ have := map[string]bool{
+ "builtin": true, // ignore pseudo-package that exists only for documentation
+ }
+ if !buildContext.CgoEnabled {
+ have["runtime/cgo"] = true // ignore during walk
+ }
+ var pkgs []string
+
+ // Commands
+ cmd := filepath.Join(goroot, "src/cmd") + string(filepath.Separator)
+ filepath.Walk(cmd, func(path string, fi os.FileInfo, err error) error {
+ if err != nil || !fi.IsDir() || path == cmd {
+ return nil
+ }
+ name := path[len(cmd):]
+ if !treeCanMatch(name) {
+ return filepath.SkipDir
+ }
+ // Commands are all in cmd/, not in subdirectories.
+ if strings.Contains(name, string(filepath.Separator)) {
+ return filepath.SkipDir
+ }
+
+ // We use, e.g., cmd/gofmt as the pseudo import path for gofmt.
+ name = "cmd/" + name
+ if have[name] {
+ return nil
+ }
+ have[name] = true
+ if !match(name) {
+ return nil
+ }
+ _, err = buildContext.ImportDir(path, 0)
+ if err != nil {
+ if _, noGo := err.(*build.NoGoError); !noGo {
+ log.Print(err)
+ }
+ return nil
+ }
+ pkgs = append(pkgs, name)
+ return nil
+ })
+
+ for _, src := range buildContext.SrcDirs() {
+ if (pattern == "std" || pattern == "cmd") && src != gorootSrc {
+ continue
+ }
+ src = filepath.Clean(src) + string(filepath.Separator)
+ root := src
+ if pattern == "cmd" {
+ root += "cmd" + string(filepath.Separator)
+ }
+ filepath.Walk(root, func(path string, fi os.FileInfo, err error) error {
+ if err != nil || !fi.IsDir() || path == src {
+ return nil
+ }
+
+ // Avoid .foo, _foo, and testdata directory trees.
+ _, elem := filepath.Split(path)
+ if strings.HasPrefix(elem, ".") || strings.HasPrefix(elem, "_") || elem == "testdata" {
+ return filepath.SkipDir
+ }
+
+ name := filepath.ToSlash(path[len(src):])
+ if pattern == "std" && (strings.Contains(name, ".") || name == "cmd") {
+ // The name "std" is only the standard library.
+ // If the name is cmd, it's the root of the command tree.
+ return filepath.SkipDir
+ }
+ if !treeCanMatch(name) {
+ return filepath.SkipDir
+ }
+ if have[name] {
+ return nil
+ }
+ have[name] = true
+ if !match(name) {
+ return nil
+ }
+ _, err = buildContext.ImportDir(path, 0)
+ if err != nil {
+ if _, noGo := err.(*build.NoGoError); noGo {
+ return nil
+ }
+ }
+ pkgs = append(pkgs, name)
+ return nil
+ })
+ }
+ return pkgs
+}
+
+func matchPackagesInFS(pattern string) []string {
+ // Find directory to begin the scan.
+ // Could be smarter but this one optimization
+ // is enough for now, since ... is usually at the
+ // end of a path.
+ i := strings.Index(pattern, "...")
+ dir, _ := path.Split(pattern[:i])
+
+ // pattern begins with ./ or ../.
+ // path.Clean will discard the ./ but not the ../.
+ // We need to preserve the ./ for pattern matching
+ // and in the returned import paths.
+ prefix := ""
+ if strings.HasPrefix(pattern, "./") {
+ prefix = "./"
+ }
+ match := matchPattern(pattern)
+
+ var pkgs []string
+ filepath.Walk(dir, func(path string, fi os.FileInfo, err error) error {
+ if err != nil || !fi.IsDir() {
+ return nil
+ }
+ if path == dir {
+ // filepath.Walk starts at dir and recurses. For the recursive case,
+ // the path is the result of filepath.Join, which calls filepath.Clean.
+ // The initial case is not Cleaned, though, so we do this explicitly.
+ //
+ // This converts a path like "./io/" to "io". Without this step, running
+ // "cd $GOROOT/src/pkg; go list ./io/..." would incorrectly skip the io
+ // package, because prepending the prefix "./" to the unclean path would
+ // result in "././io", and match("././io") returns false.
+ path = filepath.Clean(path)
+ }
+
+ // Avoid .foo, _foo, and testdata directory trees, but do not avoid "." or "..".
+ _, elem := filepath.Split(path)
+ dot := strings.HasPrefix(elem, ".") && elem != "." && elem != ".."
+ if dot || strings.HasPrefix(elem, "_") || elem == "testdata" {
+ return filepath.SkipDir
+ }
+
+ name := prefix + filepath.ToSlash(path)
+ if !match(name) {
+ return nil
+ }
+ if _, err = build.ImportDir(path, 0); err != nil {
+ if _, noGo := err.(*build.NoGoError); !noGo {
+ log.Print(err)
+ }
+ return nil
+ }
+ pkgs = append(pkgs, name)
+ return nil
+ })
+ return pkgs
+}
diff --git a/vendor/github.com/mgechev/revive/LICENSE b/vendor/github.com/mgechev/revive/LICENSE
new file mode 100644
index 0000000000..c617c7e012
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2018 Minko Gechev
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/vendor/github.com/mgechev/revive/formatter/checkstyle.go b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
new file mode 100644
index 0000000000..bd20da888c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/checkstyle.go
@@ -0,0 +1,76 @@
+package formatter
+
+import (
+ "bytes"
+ "encoding/xml"
+ "github.com/mgechev/revive/lint"
+ plainTemplate "text/template"
+)
+
+// Checkstyle is an implementation of the Formatter interface
+// which formats the errors in a checkstyle-like XML format.
+type Checkstyle struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Checkstyle) Name() string {
+ return "checkstyle"
+}
+
+type issue struct {
+ Line int
+ Col int
+ What string
+ Confidence float64
+ Severity lint.Severity
+ RuleName string
+}
+
+// Format formats the failures received from the linter.
+func (f *Checkstyle) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+ var issues = map[string][]issue{}
+ for failure := range failures {
+ buf := new(bytes.Buffer)
+ xml.Escape(buf, []byte(failure.Failure))
+ what := buf.String()
+ iss := issue{
+ Line: failure.Position.Start.Line,
+ Col: failure.Position.Start.Column,
+ What: what,
+ Confidence: failure.Confidence,
+ Severity: severity(config, failure),
+ RuleName: failure.RuleName,
+ }
+ fn := failure.GetFilename()
+ if issues[fn] == nil {
+ issues[fn] = make([]issue, 0)
+ }
+ issues[fn] = append(issues[fn], iss)
+ }
+
+ t, err := plainTemplate.New("revive").Parse(checkstyleTemplate)
+ if err != nil {
+ return "", err
+ }
+
+ buf := new(bytes.Buffer)
+
+ err = t.Execute(buf, issues)
+ if err != nil {
+ return "", err
+ }
+
+ return buf.String(), nil
+}
+
+const checkstyleTemplate = `<?xml version='1.0' encoding='UTF-8'?>
+<checkstyle version="5.0">
+{{- range $k, $v := . }}
+ <file name="{{ $k }}">
+ {{- range $i, $issue := $v }}
+ <error line="{{ $issue.Line }}" column="{{ $issue.Col }}" message="{{ $issue.What }} (confidence {{ $issue.Confidence}})" severity="{{ $issue.Severity }}" source="revive/{{ $issue.RuleName }}"/>
+ {{- end }}
+ </file>
+{{- end }}
+</checkstyle>`
diff --git a/vendor/github.com/mgechev/revive/formatter/default.go b/vendor/github.com/mgechev/revive/formatter/default.go
new file mode 100644
index 0000000000..145e6d548e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/default.go
@@ -0,0 +1,26 @@
+package formatter
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// Default is an implementation of the Formatter interface
+// which formats the errors to text.
+type Default struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Default) Name() string {
+ return "default"
+}
+
+// Format formats the failures received from the linter.
+func (f *Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+ for failure := range failures {
+ fmt.Printf("%v: %s\n", failure.Position.Start, failure.Failure)
+ }
+ return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/friendly.go b/vendor/github.com/mgechev/revive/formatter/friendly.go
new file mode 100644
index 0000000000..a543eebe00
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/friendly.go
@@ -0,0 +1,146 @@
+package formatter
+
+import (
+ "bytes"
+ "fmt"
+ "sort"
+
+ "github.com/fatih/color"
+ "github.com/mgechev/revive/lint"
+ "github.com/olekukonko/tablewriter"
+)
+
+var (
+ errorEmoji = color.RedString("✘")
+ warningEmoji = color.YellowString("⚠")
+)
+
+var newLines = map[rune]bool{
+ 0x000A: true,
+ 0x000B: true,
+ 0x000C: true,
+ 0x000D: true,
+ 0x0085: true,
+ 0x2028: true,
+ 0x2029: true,
+}
+
+// Friendly is an implementation of the Formatter interface
+// which formats the errors in a human-friendly, colorized format.
+type Friendly struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Friendly) Name() string {
+ return "friendly"
+}
+
+// Format formats the failures received from the linter.
+func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+ errorMap := map[string]int{}
+ warningMap := map[string]int{}
+ totalErrors := 0
+ totalWarnings := 0
+ for failure := range failures {
+ sev := severity(config, failure)
+ f.printFriendlyFailure(failure, sev)
+ if sev == lint.SeverityWarning {
+ warningMap[failure.RuleName] = warningMap[failure.RuleName] + 1
+ totalWarnings++
+ }
+ if sev == lint.SeverityError {
+ errorMap[failure.RuleName] = errorMap[failure.RuleName] + 1
+ totalErrors++
+ }
+ }
+ f.printSummary(totalErrors, totalWarnings)
+ f.printStatistics(color.RedString("Errors:"), errorMap)
+ f.printStatistics(color.YellowString("Warnings:"), warningMap)
+ return "", nil
+}
+
+func (f *Friendly) printFriendlyFailure(failure lint.Failure, severity lint.Severity) {
+ f.printHeaderRow(failure, severity)
+ f.printFilePosition(failure)
+ fmt.Println()
+ fmt.Println()
+}
+
+func (f *Friendly) printHeaderRow(failure lint.Failure, severity lint.Severity) {
+ emoji := warningEmoji
+ if severity == lint.SeverityError {
+ emoji = errorEmoji
+ }
+ fmt.Print(f.table([][]string{{emoji, "https://revive.run/r#" + failure.RuleName, color.GreenString(failure.Failure)}}))
+}
+
+func (f *Friendly) printFilePosition(failure lint.Failure) {
+ fmt.Printf(" %s:%d:%d", failure.GetFilename(), failure.Position.Start.Line, failure.Position.Start.Column)
+}
+
+type statEntry struct {
+ name string
+ failures int
+}
+
+func (f *Friendly) printSummary(errors, warnings int) {
+ emoji := warningEmoji
+ if errors > 0 {
+ emoji = errorEmoji
+ }
+ problemsLabel := "problems"
+ if errors+warnings == 1 {
+ problemsLabel = "problem"
+ }
+ warningsLabel := "warnings"
+ if warnings == 1 {
+ warningsLabel = "warning"
+ }
+ errorsLabel := "errors"
+ if errors == 1 {
+ errorsLabel = "error"
+ }
+ str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel)
+ if errors > 0 {
+ fmt.Printf("%s %s\n", emoji, color.RedString(str))
+ fmt.Println()
+ return
+ }
+ if warnings > 0 {
+ fmt.Printf("%s %s\n", emoji, color.YellowString(str))
+ fmt.Println()
+ return
+ }
+}
+
+func (f *Friendly) printStatistics(header string, stats map[string]int) {
+ if len(stats) == 0 {
+ return
+ }
+ var data []statEntry
+ for name, total := range stats {
+ data = append(data, statEntry{name, total})
+ }
+ sort.Slice(data, func(i, j int) bool {
+ return data[i].failures > data[j].failures
+ })
+ formatted := [][]string{}
+ for _, entry := range data {
+ formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name})
+ }
+ fmt.Println(header)
+ fmt.Println(f.table(formatted))
+}
+
+func (f *Friendly) table(rows [][]string) string {
+ buf := new(bytes.Buffer)
+ table := tablewriter.NewWriter(buf)
+ table.SetBorder(false)
+ table.SetColumnSeparator("")
+ table.SetRowSeparator("")
+ table.SetAutoWrapText(false)
+ table.AppendBulk(rows)
+ table.Render()
+ return buf.String()
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/json.go b/vendor/github.com/mgechev/revive/formatter/json.go
new file mode 100644
index 0000000000..9c939face0
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/json.go
@@ -0,0 +1,40 @@
+package formatter
+
+import (
+ "encoding/json"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// JSON is an implementation of the Formatter interface
+// which formats the errors to JSON.
+type JSON struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *JSON) Name() string {
+ return "json"
+}
+
+// jsonObject defines the JSON object for a failure
+type jsonObject struct {
+ Severity lint.Severity
+ lint.Failure `json:",inline"`
+}
+
+// Format formats the failures received from the linter.
+func (f *JSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+ var slice []jsonObject
+ for failure := range failures {
+ obj := jsonObject{}
+ obj.Severity = severity(config, failure)
+ obj.Failure = failure
+ slice = append(slice, obj)
+ }
+ result, err := json.Marshal(slice)
+ if err != nil {
+ return "", err
+ }
+ return string(result), err
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/ndjson.go b/vendor/github.com/mgechev/revive/formatter/ndjson.go
new file mode 100644
index 0000000000..aa2b1d6368
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/ndjson.go
@@ -0,0 +1,34 @@
+package formatter
+
+import (
+ "encoding/json"
+ "os"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// NDJSON is an implementation of the Formatter interface
+// which formats the errors as an NDJSON (newline-delimited JSON) stream.
+type NDJSON struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *NDJSON) Name() string {
+ return "ndjson"
+}
+
+// Format formats the failures received from the linter.
+func (f *NDJSON) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+ enc := json.NewEncoder(os.Stdout)
+ for failure := range failures {
+ obj := jsonObject{}
+ obj.Severity = severity(config, failure)
+ obj.Failure = failure
+ err := enc.Encode(obj)
+ if err != nil {
+ return "", err
+ }
+ }
+ return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/plain.go b/vendor/github.com/mgechev/revive/formatter/plain.go
new file mode 100644
index 0000000000..a854d25629
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/plain.go
@@ -0,0 +1,26 @@
+package formatter
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// Plain is an implementation of the Formatter interface
+// which formats the errors as plain text.
+type Plain struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Plain) Name() string {
+ return "plain"
+}
+
+// Format formats the failures received from the linter.
+func (f *Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+ for failure := range failures {
+ fmt.Printf("%v: %s %s\n", failure.Position.Start, failure.Failure, "https://revive.run/r#"+failure.RuleName)
+ }
+ return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/severity.go b/vendor/github.com/mgechev/revive/formatter/severity.go
new file mode 100644
index 0000000000..a43bf31923
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/severity.go
@@ -0,0 +1,13 @@
+package formatter
+
+import "github.com/mgechev/revive/lint"
+
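+// severity returns the effective severity of a failure: error if the rule or
+// directive is configured with error severity, warning otherwise.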
+func severity(config lint.Config, failure lint.Failure) lint.Severity {
+ if config, ok := config.Rules[failure.RuleName]; ok && config.Severity == lint.SeverityError {
+ return lint.SeverityError
+ }
+ if config, ok := config.Directives[failure.RuleName]; ok && config.Severity == lint.SeverityError {
+ return lint.SeverityError
+ }
+ return lint.SeverityWarning
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/stylish.go b/vendor/github.com/mgechev/revive/formatter/stylish.go
new file mode 100644
index 0000000000..cd81fdae7e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/stylish.go
@@ -0,0 +1,89 @@
+package formatter
+
+import (
+ "bytes"
+ "fmt"
+
+ "github.com/fatih/color"
+ "github.com/mgechev/revive/lint"
+ "github.com/olekukonko/tablewriter"
+)
+
+// Stylish is an implementation of the Formatter interface
+// which formats the errors in a stylish, tabular format grouped by file.
+type Stylish struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Stylish) Name() string {
+ return "stylish"
+}
+
+func formatFailure(failure lint.Failure, severity lint.Severity) []string {
+ fString := color.CyanString(failure.Failure)
+ fName := color.RedString("https://revive.run/r#" + failure.RuleName)
+ lineColumn := failure.Position
+ pos := fmt.Sprintf("(%d, %d)", lineColumn.Start.Line, lineColumn.Start.Column)
+ if severity == lint.SeverityWarning {
+ fName = color.YellowString("https://revive.run/r#" + failure.RuleName)
+ }
+ return []string{failure.GetFilename(), pos, fName, fString}
+}
+
+// Format formats the failures received from the linter.
+func (f *Stylish) Format(failures <-chan lint.Failure, config lint.Config) (string, error) {
+ var result [][]string
+ var totalErrors = 0
+ var total = 0
+
+ for f := range failures {
+ total++
+ currentType := severity(config, f)
+ if currentType == lint.SeverityError {
+ totalErrors++
+ }
+ result = append(result, formatFailure(f, lint.Severity(currentType)))
+ }
+ ps := "problems"
+ if total == 1 {
+ ps = "problem"
+ }
+
+ fileReport := make(map[string][][]string)
+
+ for _, row := range result {
+ if _, ok := fileReport[row[0]]; !ok {
+ fileReport[row[0]] = [][]string{}
+ }
+
+ fileReport[row[0]] = append(fileReport[row[0]], []string{row[1], row[2], row[3]})
+ }
+
+ output := ""
+ for filename, val := range fileReport {
+ buf := new(bytes.Buffer)
+ table := tablewriter.NewWriter(buf)
+ table.SetBorder(false)
+ table.SetColumnSeparator("")
+ table.SetRowSeparator("")
+ table.SetAutoWrapText(false)
+ table.AppendBulk(val)
+ table.Render()
+ c := color.New(color.Underline)
+ output += c.SprintfFunc()(filename + "\n")
+ output += buf.String() + "\n"
+ }
+
+ suffix := fmt.Sprintf(" %d %s (%d errors) (%d warnings)", total, ps, totalErrors, total-totalErrors)
+
+ if total > 0 && totalErrors > 0 {
+ suffix = color.RedString("\n ✖" + suffix)
+ } else if total > 0 && totalErrors == 0 {
+ suffix = color.YellowString("\n ✖" + suffix)
+ } else {
+ suffix, output = "", ""
+ }
+
+ return output + suffix, nil
+}
diff --git a/vendor/github.com/mgechev/revive/formatter/unix.go b/vendor/github.com/mgechev/revive/formatter/unix.go
new file mode 100644
index 0000000000..b9ae62d38d
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/formatter/unix.go
@@ -0,0 +1,27 @@
+package formatter
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// Unix is an implementation of the Formatter interface
+// which formats the errors in a simple line-based error format
+// main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)
+type Unix struct {
+ Metadata lint.FormatterMetadata
+}
+
+// Name returns the name of the formatter
+func (f *Unix) Name() string {
+ return "unix"
+}
+
+// Format formats the failures received from the linter.
+func (f *Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
+ for failure := range failures {
+ fmt.Printf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
+ }
+ return "", nil
+}
diff --git a/vendor/github.com/mgechev/revive/lint/config.go b/vendor/github.com/mgechev/revive/lint/config.go
new file mode 100644
index 0000000000..fe65ace522
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/config.go
@@ -0,0 +1,32 @@
+package lint
+
+// Arguments is the type used for the arguments of a rule.
+type Arguments = []interface{}
+
+// RuleConfig is the type used for the rule configuration.
+type RuleConfig struct {
+ Arguments Arguments
+ Severity Severity
+}
+
+// RulesConfig defines the config for all rules.
+type RulesConfig = map[string]RuleConfig
+
+// DirectiveConfig is the type used for the linter directive configuration.
+type DirectiveConfig struct {
+ Severity Severity
+}
+
+// DirectivesConfig defines the config for all directives.
+type DirectivesConfig = map[string]DirectiveConfig
+
+// Config defines the config of the linter.
+type Config struct {
+ IgnoreGeneratedHeader bool `toml:"ignoreGeneratedHeader"`
+ Confidence float64
+ Severity Severity
+ Rules RulesConfig `toml:"rule"`
+ ErrorCode int `toml:"errorCode"`
+ WarningCode int `toml:"warningCode"`
+ Directives DirectivesConfig `toml:"directive"`
+}
diff --git a/vendor/github.com/mgechev/revive/lint/failure.go b/vendor/github.com/mgechev/revive/lint/failure.go
new file mode 100644
index 0000000000..479b0cb48b
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/failure.go
@@ -0,0 +1,39 @@
+package lint
+
+import (
+ "go/ast"
+ "go/token"
+)
+
+const (
+ // SeverityWarning declares failures of type warning
+ SeverityWarning = "warning"
+ // SeverityError declares failures of type error.
+ SeverityError = "error"
+)
+
+// Severity is the type used for failure severity levels.
+type Severity string
+
+// FailurePosition holds the start and end positions of a failure.
+type FailurePosition struct {
+ Start token.Position
+ End token.Position
+}
+
+// Failure defines a struct for a linting failure.
+type Failure struct {
+ Failure string
+ RuleName string
+ Category string
+ Position FailurePosition
+ Node ast.Node `json:"-"`
+ Confidence float64
+ // For future use
+ ReplacementLine string
+}
+
+// GetFilename returns the filename.
+func (f *Failure) GetFilename() string {
+ return f.Position.Start.Filename
+}
diff --git a/vendor/github.com/mgechev/revive/lint/file.go b/vendor/github.com/mgechev/revive/lint/file.go
new file mode 100644
index 0000000000..8bef9c220c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/file.go
@@ -0,0 +1,278 @@
+package lint
+
+import (
+ "bytes"
+ "go/ast"
+ "go/parser"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "math"
+ "regexp"
+ "strings"
+)
+
+// File abstraction used for representing files.
+type File struct {
+ Name string
+ Pkg *Package
+ content []byte
+ AST *ast.File
+}
+
+// IsTest reports whether the file is a test file.
+func (f *File) IsTest() bool { return strings.HasSuffix(f.Name, "_test.go") }
+
+// Content returns the file's content.
+func (f *File) Content() []byte {
+ return f.content
+}
+
+// NewFile creates a new file
+func NewFile(name string, content []byte, pkg *Package) (*File, error) {
+ f, err := parser.ParseFile(pkg.fset, name, content, parser.ParseComments)
+ if err != nil {
+ return nil, err
+ }
+ return &File{
+ Name: name,
+ content: content,
+ Pkg: pkg,
+ AST: f,
+ }, nil
+}
+
+// ToPosition returns line and column for given position.
+func (f *File) ToPosition(pos token.Pos) token.Position {
+ return f.Pkg.fset.Position(pos)
+}
+
+// Render renders a node.
+func (f *File) Render(x interface{}) string {
+ var buf bytes.Buffer
+ if err := printer.Fprint(&buf, f.Pkg.fset, x); err != nil {
+ panic(err)
+ }
+ return buf.String()
+}
+
+// CommentMap builds a comment map for the file.
+func (f *File) CommentMap() ast.CommentMap {
+ return ast.NewCommentMap(f.Pkg.fset, f.AST, f.AST.Comments)
+}
+
+var basicTypeKinds = map[types.BasicKind]string{
+ types.UntypedBool: "bool",
+ types.UntypedInt: "int",
+ types.UntypedRune: "rune",
+ types.UntypedFloat: "float64",
+ types.UntypedComplex: "complex128",
+ types.UntypedString: "string",
+}
+
+// IsUntypedConst reports whether expr is an untyped constant,
+// and indicates what its default type is.
+func (f *File) IsUntypedConst(expr ast.Expr) (defType string, ok bool) {
+ // Re-evaluate expr outside of its context to see if it's untyped.
+ // (An expr evaluated within, for example, an assignment context will get the type of the LHS.)
+ exprStr := f.Render(expr)
+ tv, err := types.Eval(f.Pkg.fset, f.Pkg.TypesPkg, expr.Pos(), exprStr)
+ if err != nil {
+ return "", false
+ }
+ if b, ok := tv.Type.(*types.Basic); ok {
+ if dt, ok := basicTypeKinds[b.Kind()]; ok {
+ return dt, true
+ }
+ }
+
+ return "", false
+}
+
+func (f *File) isMain() bool {
+ if f.AST.Name.Name == "main" {
+ return true
+ }
+ return false
+}
+
+const directiveSpecifyDisableReason = "specify-disable-reason"
+
+func (f *File) lint(rules []Rule, config Config, failures chan Failure) {
+ rulesConfig := config.Rules
+ _, mustSpecifyDisableReason := config.Directives[directiveSpecifyDisableReason]
+ disabledIntervals := f.disabledIntervals(rules, mustSpecifyDisableReason, failures)
+ for _, currentRule := range rules {
+ ruleConfig := rulesConfig[currentRule.Name()]
+ currentFailures := currentRule.Apply(f, ruleConfig.Arguments)
+ for idx, failure := range currentFailures {
+ if failure.RuleName == "" {
+ failure.RuleName = currentRule.Name()
+ }
+ if failure.Node != nil {
+ failure.Position = ToFailurePosition(failure.Node.Pos(), failure.Node.End(), f)
+ }
+ currentFailures[idx] = failure
+ }
+ currentFailures = f.filterFailures(currentFailures, disabledIntervals)
+ for _, failure := range currentFailures {
+ if failure.Confidence >= config.Confidence {
+ failures <- failure
+ }
+ }
+ }
+}
+
+type enableDisableConfig struct {
+ enabled bool
+ position int
+}
+
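+// directiveRE matches revive control comments, e.g.
+//	//revive:disable-next-line:rule1,rule2 reason
+// The capture groups are indexed by the *Pos constants below.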
+const directiveRE = `^//[\s]*revive:(enable|disable)(?:-(line|next-line))?(?::([^\s]+))?[\s]*(?: (.+))?$`
+const directivePos = 1
+const modifierPos = 2
+const rulesPos = 3
+const reasonPos = 4
+
+var re = regexp.MustCompile(directiveRE)
+
+func (f *File) disabledIntervals(rules []Rule, mustSpecifyDisableReason bool, failures chan Failure) disabledIntervalsMap {
+ enabledDisabledRulesMap := make(map[string][]enableDisableConfig)
+
+ getEnabledDisabledIntervals := func() disabledIntervalsMap {
+ result := make(disabledIntervalsMap)
+
+ for ruleName, disabledArr := range enabledDisabledRulesMap {
+ ruleResult := []DisabledInterval{}
+ for i := 0; i < len(disabledArr); i++ {
+ interval := DisabledInterval{
+ RuleName: ruleName,
+ From: token.Position{
+ Filename: f.Name,
+ Line: disabledArr[i].position,
+ },
+ To: token.Position{
+ Filename: f.Name,
+ Line: math.MaxInt32,
+ },
+ }
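+				// entries alternate disable/enable: an even index opens a disabled
+				// interval (left open-ended) and the following odd index closes it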
+ if i%2 == 0 {
+ ruleResult = append(ruleResult, interval)
+ } else {
+ ruleResult[len(ruleResult)-1].To.Line = disabledArr[i].position
+ }
+ }
+ result[ruleName] = ruleResult
+ }
+
+ return result
+ }
+
+ handleConfig := func(isEnabled bool, line int, name string) {
+ existing, ok := enabledDisabledRulesMap[name]
+ if !ok {
+ existing = []enableDisableConfig{}
+ enabledDisabledRulesMap[name] = existing
+ }
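+		// ignore directives that would not change the recorded state,
+		// e.g. enabling a rule that is not currently disabled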
+ if (len(existing) > 1 && existing[len(existing)-1].enabled == isEnabled) ||
+ (len(existing) == 0 && isEnabled) {
+ return
+ }
+ existing = append(existing, enableDisableConfig{
+ enabled: isEnabled,
+ position: line,
+ })
+ enabledDisabledRulesMap[name] = existing
+ }
+
+ handleRules := func(filename, modifier string, isEnabled bool, line int, ruleNames []string) []DisabledInterval {
+ var result []DisabledInterval
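+		// for the line and next-line modifiers the state is toggled and
+		// immediately restored, so the directive affects a single line only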
+ for _, name := range ruleNames {
+ if modifier == "line" {
+ handleConfig(isEnabled, line, name)
+ handleConfig(!isEnabled, line, name)
+ } else if modifier == "next-line" {
+ handleConfig(isEnabled, line+1, name)
+ handleConfig(!isEnabled, line+1, name)
+ } else {
+ handleConfig(isEnabled, line, name)
+ }
+ }
+ return result
+ }
+
+ handleComment := func(filename string, c *ast.CommentGroup, line int) {
+ comments := c.List
+ for _, c := range comments {
+ match := re.FindStringSubmatch(c.Text)
+ if len(match) == 0 {
+ return
+ }
+
+ ruleNames := []string{}
+ tempNames := strings.Split(match[rulesPos], ",")
+ for _, name := range tempNames {
+ name = strings.Trim(name, "\n")
+ if len(name) > 0 {
+ ruleNames = append(ruleNames, name)
+ }
+ }
+
+ mustCheckDisablingReason := mustSpecifyDisableReason && match[directivePos] == "disable"
+ if mustCheckDisablingReason && strings.Trim(match[reasonPos], " ") == "" {
+ failures <- Failure{
+ Confidence: 1,
+ RuleName: directiveSpecifyDisableReason,
+ Failure: "reason of lint disabling not found",
+ Position: ToFailurePosition(c.Pos(), c.End(), f),
+ Node: c,
+ }
+ continue // skip this linter disabling directive
+ }
+
+ // TODO: optimize
+ if len(ruleNames) == 0 {
+ for _, rule := range rules {
+ ruleNames = append(ruleNames, rule.Name())
+ }
+ }
+
+ handleRules(filename, match[modifierPos], match[directivePos] == "enable", line, ruleNames)
+ }
+ }
+
+ comments := f.AST.Comments
+ for _, c := range comments {
+ handleComment(f.Name, c, f.ToPosition(c.End()).Line)
+ }
+
+ return getEnabledDisabledIntervals()
+}
+
+func (f *File) filterFailures(failures []Failure, disabledIntervals disabledIntervalsMap) []Failure {
+ result := []Failure{}
+ for _, failure := range failures {
+ fStart := failure.Position.Start.Line
+ fEnd := failure.Position.End.Line
+ intervals, ok := disabledIntervals[failure.RuleName]
+ if !ok {
+ result = append(result, failure)
+ } else {
+ include := true
+ for _, interval := range intervals {
+ intStart := interval.From.Line
+ intEnd := interval.To.Line
+ if (fStart >= intStart && fStart <= intEnd) ||
+ (fEnd >= intStart && fEnd <= intEnd) {
+ include = false
+ break
+ }
+ }
+ if include {
+ result = append(result, failure)
+ }
+ }
+ }
+ return result
+}
diff --git a/vendor/github.com/mgechev/revive/lint/formatter.go b/vendor/github.com/mgechev/revive/lint/formatter.go
new file mode 100644
index 0000000000..7c19af278a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/formatter.go
@@ -0,0 +1,14 @@
+package lint
+
+// FormatterMetadata holds the configuration of a formatter
+type FormatterMetadata struct {
+ Name string
+ Description string
+ Sample string
+}
+
+// Formatter defines an interface for failure formatters
+type Formatter interface {
+ Format(<-chan Failure, Config) (string, error)
+ Name() string
+}
diff --git a/vendor/github.com/mgechev/revive/lint/linter.go b/vendor/github.com/mgechev/revive/lint/linter.go
new file mode 100644
index 0000000000..cdca84fb56
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/linter.go
@@ -0,0 +1,99 @@
+package lint
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/token"
+ "os"
+ "sync"
+)
+
+// ReadFile defines an abstraction for reading files.
+type ReadFile func(path string) (result []byte, err error)
+
+type disabledIntervalsMap = map[string][]DisabledInterval
+
+// Linter is used for linting a set of files.
+type Linter struct {
+ reader ReadFile
+}
+
+// New creates a new Linter
+func New(reader ReadFile) Linter {
+ return Linter{reader: reader}
+}
+
+var (
+ genHdr = []byte("// Code generated ")
+ genFtr = []byte(" DO NOT EDIT.")
+)
+
+// Lint lints a set of packages with the specified rule set.
+func (l *Linter) Lint(packages [][]string, ruleSet []Rule, config Config) (<-chan Failure, error) {
+ failures := make(chan Failure)
+
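+	// lint each package in its own goroutine; the failures channel is
+	// closed once every package has been processed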
+ var wg sync.WaitGroup
+ for _, pkg := range packages {
+ wg.Add(1)
+ go func(pkg []string) {
+ if err := l.lintPackage(pkg, ruleSet, config, failures); err != nil {
+ fmt.Fprintln(os.Stderr, err)
+ os.Exit(1)
+ }
+ defer wg.Done()
+ }(pkg)
+ }
+
+ go func() {
+ wg.Wait()
+ close(failures)
+ }()
+
+ return failures, nil
+}
+
+func (l *Linter) lintPackage(filenames []string, ruleSet []Rule, config Config, failures chan Failure) error {
+ pkg := &Package{
+ fset: token.NewFileSet(),
+ files: map[string]*File{},
+ mu: sync.Mutex{},
+ }
+ for _, filename := range filenames {
+ content, err := l.reader(filename)
+ if err != nil {
+ return err
+ }
+ if isGenerated(content) && !config.IgnoreGeneratedHeader {
+ continue
+ }
+
+ file, err := NewFile(filename, content, pkg)
+ if err != nil {
+ return err
+ }
+ pkg.files[filename] = file
+ }
+
+ if len(pkg.files) == 0 {
+ return nil
+ }
+
+ pkg.lint(ruleSet, config, failures)
+
+ return nil
+}
+
+// isGenerated reports whether the source file is generated code
+// according to the rules from https://golang.org/s/generatedcode.
+// This is inherited from the original go lint.
+func isGenerated(src []byte) bool {
+ sc := bufio.NewScanner(bytes.NewReader(src))
+ for sc.Scan() {
+ b := sc.Bytes()
+ if bytes.HasPrefix(b, genHdr) && bytes.HasSuffix(b, genFtr) && len(b) >= len(genHdr)+len(genFtr) {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/mgechev/revive/lint/package.go b/vendor/github.com/mgechev/revive/lint/package.go
new file mode 100644
index 0000000000..7b6046fd7e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/package.go
@@ -0,0 +1,178 @@
+package lint
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+ "sync"
+
+ "golang.org/x/tools/go/gcexportdata"
+)
+
+// Package represents a package in the project.
+type Package struct {
+ fset *token.FileSet
+ files map[string]*File
+
+ TypesPkg *types.Package
+ TypesInfo *types.Info
+
+ // sortable is the set of types in the package that implement sort.Interface.
+ Sortable map[string]bool
+ // main is whether this is a "main" package.
+ main int
+ mu sync.Mutex
+}
+
+var newImporter = func(fset *token.FileSet) types.ImporterFrom {
+ return gcexportdata.NewImporter(fset, make(map[string]*types.Package))
+}
+
+var (
+ trueValue = 1
+ falseValue = 2
+ notSet = 3
+)
+
+// IsMain reports whether the package is a main package.
+func (p *Package) IsMain() bool {
+ if p.main == trueValue {
+ return true
+ } else if p.main == falseValue {
+ return false
+ }
+ for _, f := range p.files {
+ if f.isMain() {
+ p.main = trueValue
+ return true
+ }
+ }
+ p.main = falseValue
+ return false
+}
+
+// TypeCheck performs type checking for the given package.
+func (p *Package) TypeCheck() error {
+ p.mu.Lock()
+ // If type checking has already been performed
+ // skip it.
+ if p.TypesInfo != nil || p.TypesPkg != nil {
+ p.mu.Unlock()
+ return nil
+ }
+ config := &types.Config{
+ // By setting a no-op error reporter, the type checker does as much work as possible.
+ Error: func(error) {},
+ Importer: newImporter(p.fset),
+ }
+ info := &types.Info{
+ Types: make(map[ast.Expr]types.TypeAndValue),
+ Defs: make(map[*ast.Ident]types.Object),
+ Uses: make(map[*ast.Ident]types.Object),
+ Scopes: make(map[ast.Node]*types.Scope),
+ }
+ var anyFile *File
+ var astFiles []*ast.File
+ for _, f := range p.files {
+ anyFile = f
+ astFiles = append(astFiles, f.AST)
+ }
+
+ typesPkg, err := check(config, anyFile.AST.Name.Name, p.fset, astFiles, info)
+
+ // Remember the typechecking info, even if config.Check failed,
+ // since we will get partial information.
+ p.TypesPkg = typesPkg
+ p.TypesInfo = info
+ p.mu.Unlock()
+ return err
+}
+
+// check function encapsulates the call to go/types.Config.Check method and
+// recovers if the called method panics (see issue #59)
+func check(config *types.Config, n string, fset *token.FileSet, astFiles []*ast.File, info *types.Info) (p *types.Package, err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ err, _ = r.(error)
+ p = nil
+ return
+ }
+ }()
+
+ return config.Check(n, fset, astFiles, info)
+}
+
+// TypeOf returns the type of an expression.
+func (p *Package) TypeOf(expr ast.Expr) types.Type {
+ if p.TypesInfo == nil {
+ return nil
+ }
+ return p.TypesInfo.TypeOf(expr)
+}
+
+type walker struct {
+ nmap map[string]int
+ has map[string]int
+}
+
+func (w *walker) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
+ return w
+ }
+ // TODO(dsymonds): We could check the signature to be more precise.
+ recv := receiverType(fn)
+ if i, ok := w.nmap[fn.Name.Name]; ok {
+ w.has[recv] |= i
+ }
+ return w
+}
+
+func (p *Package) scanSortable() {
+ p.Sortable = make(map[string]bool)
+
+ // bitfield for which methods exist on each type.
+ const (
+ Len = 1 << iota
+ Less
+ Swap
+ )
+ nmap := map[string]int{"Len": Len, "Less": Less, "Swap": Swap}
+ has := make(map[string]int)
+ for _, f := range p.files {
+ ast.Walk(&walker{nmap, has}, f.AST)
+ }
+ for typ, ms := range has {
+ if ms == Len|Less|Swap {
+ p.Sortable[typ] = true
+ }
+ }
+}
+
+// receiverType returns the named type of the method receiver, sans "*",
+// or "invalid-type" if fn.Recv is ill formed.
+func receiverType(fn *ast.FuncDecl) string {
+ switch e := fn.Recv.List[0].Type.(type) {
+ case *ast.Ident:
+ return e.Name
+ case *ast.StarExpr:
+ if id, ok := e.X.(*ast.Ident); ok {
+ return id.Name
+ }
+ }
+ // The parser accepts much more than just the legal forms.
+ return "invalid-type"
+}
+
+func (p *Package) lint(rules []Rule, config Config, failures chan Failure) {
+ p.scanSortable()
+ var wg sync.WaitGroup
+ for _, file := range p.files {
+ wg.Add(1)
+ go (func(file *File) {
+ file.lint(rules, config, failures)
+ defer wg.Done()
+ })(file)
+ }
+ wg.Wait()
+}
diff --git a/vendor/github.com/mgechev/revive/lint/rule.go b/vendor/github.com/mgechev/revive/lint/rule.go
new file mode 100644
index 0000000000..815abfdd88
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/rule.go
@@ -0,0 +1,31 @@
+package lint
+
+import (
+ "go/token"
+)
+
+// DisabledInterval contains a single disabled interval and the associated rule name.
+type DisabledInterval struct {
+ From token.Position
+ To token.Position
+ RuleName string
+}
+
+// Rule defines an abstract rule interface
+type Rule interface {
+ Name() string
+ Apply(*File, Arguments) []Failure
+}
+
+// AbstractRule defines an abstract rule.
+type AbstractRule struct {
+ Failures []Failure
+}
+
+// ToFailurePosition returns the failure position.
+func ToFailurePosition(start token.Pos, end token.Pos, file *File) FailurePosition {
+ return FailurePosition{
+ Start: file.ToPosition(start),
+ End: file.ToPosition(end),
+ }
+}
diff --git a/vendor/github.com/mgechev/revive/lint/utils.go b/vendor/github.com/mgechev/revive/lint/utils.go
new file mode 100644
index 0000000000..28657c6df0
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/lint/utils.go
@@ -0,0 +1,128 @@
+package lint
+
+import (
+ "strings"
+ "unicode"
+)
+
+// Name returns a different name if it should be different.
+func Name(name string, whitelist, blacklist []string) (should string) {
+ // Fast path for simple cases: "_" and all lowercase.
+ if name == "_" {
+ return name
+ }
+ allLower := true
+ for _, r := range name {
+ if !unicode.IsLower(r) {
+ allLower = false
+ break
+ }
+ }
+ if allLower {
+ return name
+ }
+
+ // Split camelCase at any lower->upper transition, and split on underscores.
+ // Check each word for common initialisms.
+ runes := []rune(name)
+ w, i := 0, 0 // index of start of word, scan
+ for i+1 <= len(runes) {
+ eow := false // whether we hit the end of a word
+ if i+1 == len(runes) {
+ eow = true
+ } else if runes[i+1] == '_' {
+ // underscore; shift the remainder forward over any run of underscores
+ eow = true
+ n := 1
+ for i+n+1 < len(runes) && runes[i+n+1] == '_' {
+ n++
+ }
+
+ // Leave at most one underscore if the underscore is between two digits
+ if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
+ n--
+ }
+
+ copy(runes[i+1:], runes[i+n+1:])
+ runes = runes[:len(runes)-n]
+ } else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
+ // lower->non-lower
+ eow = true
+ }
+ i++
+ if !eow {
+ continue
+ }
+
+ // [w,i) is a word.
+ word := string(runes[w:i])
+ ignoreInitWarnings := map[string]bool{}
+ for _, i := range whitelist {
+ ignoreInitWarnings[i] = true
+ }
+
+ extraInits := map[string]bool{}
+ for _, i := range blacklist {
+ extraInits[i] = true
+ }
+
+ if u := strings.ToUpper(word); (commonInitialisms[u] || extraInits[u]) && !ignoreInitWarnings[u] {
+ // Keep consistent case, which is lowercase only at the start.
+ if w == 0 && unicode.IsLower(runes[w]) {
+ u = strings.ToLower(u)
+ }
+ // All the common initialisms are ASCII,
+ // so we can replace the bytes exactly.
+ copy(runes[w:], []rune(u))
+ } else if w > 0 && strings.ToLower(word) == word {
+ // already all lowercase, and not the first word, so uppercase the first character.
+ runes[w] = unicode.ToUpper(runes[w])
+ }
+ w = i
+ }
+ return string(runes)
+}
+
+// commonInitialisms is a set of common initialisms.
+// Only add entries that are highly unlikely to be non-initialisms.
+// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
+var commonInitialisms = map[string]bool{
+ "ACL": true,
+ "API": true,
+ "ASCII": true,
+ "CPU": true,
+ "CSS": true,
+ "DNS": true,
+ "EOF": true,
+ "GUID": true,
+ "HTML": true,
+ "HTTP": true,
+ "HTTPS": true,
+ "ID": true,
+ "IP": true,
+ "JSON": true,
+ "LHS": true,
+ "QPS": true,
+ "RAM": true,
+ "RHS": true,
+ "RPC": true,
+ "SLA": true,
+ "SMTP": true,
+ "SQL": true,
+ "SSH": true,
+ "TCP": true,
+ "TLS": true,
+ "TTL": true,
+ "UDP": true,
+ "UI": true,
+ "UID": true,
+ "UUID": true,
+ "URI": true,
+ "URL": true,
+ "UTF8": true,
+ "VM": true,
+ "XML": true,
+ "XMPP": true,
+ "XSRF": true,
+ "XSS": true,
+}
diff --git a/vendor/github.com/mgechev/revive/rule/add-constant.go b/vendor/github.com/mgechev/revive/rule/add-constant.go
new file mode 100644
index 0000000000..881bbd073f
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/add-constant.go
@@ -0,0 +1,151 @@
+package rule
+
+import (
+ "fmt"
+ "github.com/mgechev/revive/lint"
+ "go/ast"
+ "strconv"
+ "strings"
+)
+
+const (
+ defaultStrLitLimit = 2
+ kindFLOAT = "FLOAT"
+ kindINT = "INT"
+ kindSTRING = "STRING"
+)
+
+type whiteList map[string]map[string]bool
+
+func newWhiteList() whiteList {
+ return map[string]map[string]bool{kindINT: map[string]bool{}, kindFLOAT: map[string]bool{}, kindSTRING: map[string]bool{}}
+}
+
+func (wl whiteList) add(kind string, list string) {
+ elems := strings.Split(list, ",")
+ for _, e := range elems {
+ wl[kind][e] = true
+ }
+}
+
+// AddConstantRule suggests replacing magic numbers and repeated string literals with named constants.
+type AddConstantRule struct{}
+
+// Apply applies the rule to given file.
+func (r *AddConstantRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ strLitLimit := defaultStrLitLimit
+ var whiteList = newWhiteList()
+ if len(arguments) > 0 {
+ args, ok := arguments[0].(map[string]interface{})
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the add-constant rule. Expecting a k,v map, got %T", arguments[0]))
+ }
+ for k, v := range args {
+ kind := ""
+ switch k {
+ case "allowFloats":
+ kind = kindFLOAT
+ fallthrough
+ case "allowInts":
+ if kind == "" {
+ kind = kindINT
+ }
+ fallthrough
+ case "allowStrs":
+ if kind == "" {
+ kind = kindSTRING
+ }
+ list, ok := v.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the add-constant rule, string expected. Got '%v' (%T)", v, v))
+ }
+ whiteList.add(kind, list)
+ case "maxLitCount":
+ sl, ok := v.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v' (%T)", v, v))
+ }
+
+ limit, err := strconv.Atoi(sl)
+ if err != nil {
+ panic(fmt.Sprintf("Invalid argument to the add-constant rule, expecting string representation of an integer. Got '%v'", v))
+ }
+ strLitLimit = limit
+ }
+ }
+ }
+
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintAddConstantRule{onFailure: onFailure, strLits: make(map[string]int, 0), strLitLimit: strLitLimit, whiteLst: whiteList}
+
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *AddConstantRule) Name() string {
+ return "add-constant"
+}
+
+type lintAddConstantRule struct {
+ onFailure func(lint.Failure)
+ strLits map[string]int
+ strLitLimit int
+ whiteLst whiteList
+}
+
+func (w lintAddConstantRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.GenDecl:
+ return nil // skip declarations
+ case *ast.BasicLit:
+ switch kind := n.Kind.String(); kind {
+ case kindFLOAT, kindINT:
+ w.checkNumLit(kind, n)
+ case kindSTRING:
+ w.checkStrLit(n)
+ }
+ }
+
+ return w
+
+}
+
+func (w lintAddConstantRule) checkStrLit(n *ast.BasicLit) {
+ if w.whiteLst[kindSTRING][n.Value] {
+ return
+ }
+
+ count := w.strLits[n.Value]
+ if count >= 0 {
+ w.strLits[n.Value] = count + 1
+ if w.strLits[n.Value] > w.strLitLimit {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: n,
+ Category: "style",
+ Failure: fmt.Sprintf("string literal %s appears, at least, %d times, create a named constant for it", n.Value, w.strLits[n.Value]),
+ })
+ w.strLits[n.Value] = -1 // mark it to avoid failing again on the same literal
+ }
+ }
+}
+
+func (w lintAddConstantRule) checkNumLit(kind string, n *ast.BasicLit) {
+ if w.whiteLst[kind][n.Value] {
+ return
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: n,
+ Category: "style",
+ Failure: fmt.Sprintf("avoid magic numbers like '%s', create a named constant for it", n.Value),
+ })
+}
diff --git a/vendor/github.com/mgechev/revive/rule/argument-limit.go b/vendor/github.com/mgechev/revive/rule/argument-limit.go
new file mode 100644
index 0000000000..2b11d49825
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/argument-limit.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ArgumentsLimitRule warns when the number of arguments of a function exceeds the configured maximum.
+type ArgumentsLimitRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ArgumentsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ if len(arguments) != 1 {
+ panic(`invalid configuration for "argument-limit"`)
+ }
+
+ total, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(`invalid value passed as argument number to the "argument-list" rule`)
+ }
+
+ var failures []lint.Failure
+
+ walker := lintArgsNum{
+ total: int(total),
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ArgumentsLimitRule) Name() string {
+ return "argument-limit"
+}
+
+type lintArgsNum struct {
+ total int
+ onFailure func(lint.Failure)
+}
+
+func (w lintArgsNum) Visit(n ast.Node) ast.Visitor {
+ node, ok := n.(*ast.FuncDecl)
+ if ok {
+ num := 0
+ for _, l := range node.Type.Params.List {
+ for range l.Names {
+ num++
+ }
+ }
+ if num > w.total {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of arguments per function exceeded; max %d but got %d", w.total, num),
+ Node: node.Type,
+ })
+ return w
+ }
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/atomic.go b/vendor/github.com/mgechev/revive/rule/atomic.go
new file mode 100644
index 0000000000..572e141da9
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/atomic.go
@@ -0,0 +1,94 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// AtomicRule checks for common mistaken usages of the sync/atomic package.
+type AtomicRule struct{}
+
+// Apply applies the rule to given file.
+func (r *AtomicRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ walker := atomic{
+ pkgTypesInfo: file.Pkg.TypesInfo,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *AtomicRule) Name() string {
+ return "atomic"
+}
+
+type atomic struct {
+ pkgTypesInfo *types.Info
+ onFailure func(lint.Failure)
+}
+
+func (w atomic) Visit(node ast.Node) ast.Visitor {
+ n, ok := node.(*ast.AssignStmt)
+ if !ok {
+ return w
+ }
+
+ if len(n.Lhs) != len(n.Rhs) {
+ return nil // skip assignment sub-tree
+ }
+ if len(n.Lhs) == 1 && n.Tok == token.DEFINE {
+ return nil // skip assignment sub-tree
+ }
+
+ for i, right := range n.Rhs {
+ call, ok := right.(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ sel, ok := call.Fun.(*ast.SelectorExpr)
+ if !ok {
+ continue
+ }
+ pkgIdent, _ := sel.X.(*ast.Ident)
+ if w.pkgTypesInfo != nil {
+ pkgName, ok := w.pkgTypesInfo.Uses[pkgIdent].(*types.PkgName)
+ if !ok || pkgName.Imported().Path() != "sync/atomic" {
+ continue
+ }
+ }
+
+ switch sel.Sel.Name {
+ case "AddInt32", "AddInt64", "AddUint32", "AddUint64", "AddUintptr":
+ left := n.Lhs[i]
+ if len(call.Args) != 2 {
+ continue
+ }
+ arg := call.Args[0]
+ broken := false
+
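+			// flag write-backs such as x = atomic.AddInt32(&x, 1)
+			// or *p = atomic.AddInt32(p, 1)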
+ if uarg, ok := arg.(*ast.UnaryExpr); ok && uarg.Op == token.AND {
+ broken = gofmt(left) == gofmt(uarg.X)
+ } else if star, ok := left.(*ast.StarExpr); ok {
+ broken = gofmt(star.X) == gofmt(arg)
+ }
+
+ if broken {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: "direct assignment to atomic value",
+ Node: n,
+ })
+ }
+ }
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/bare-return.go b/vendor/github.com/mgechev/revive/rule/bare-return.go
new file mode 100644
index 0000000000..3ee4c4adc2
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/bare-return.go
@@ -0,0 +1,84 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// BareReturnRule warns on bare returns in functions with named result parameters.
+type BareReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (r *BareReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintBareReturnRule{onFailure: onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *BareReturnRule) Name() string {
+ return "bare-return"
+}
+
+type lintBareReturnRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintBareReturnRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ w.checkFunc(n.Type.Results, n.Body)
+ case *ast.FuncLit: // to cope with deferred functions and go-routines
+ w.checkFunc(n.Type.Results, n.Body)
+ }
+
+ return w
+}
+
+// checkFunc verifies whether the given function has named results and bare returns
+func (w lintBareReturnRule) checkFunc(results *ast.FieldList, body *ast.BlockStmt) {
+ hasNamedResults := results != nil && len(results.List) > 0 && results.List[0].Names != nil
+ if !hasNamedResults || body == nil {
+ return // nothing to do
+ }
+
+ brf := bareReturnFinder{w.onFailure}
+ ast.Walk(brf, body)
+}
+
+type bareReturnFinder struct {
+ onFailure func(lint.Failure)
+}
+
+func (w bareReturnFinder) Visit(node ast.Node) ast.Visitor {
+ _, ok := node.(*ast.FuncLit)
+ if ok {
+		// skip analyzing function literals;
+		// they will be analyzed by the lintBareReturnRule.Visit method
+ return nil
+ }
+
+ rs, ok := node.(*ast.ReturnStmt)
+ if !ok {
+ return w
+ }
+
+ if len(rs.Results) > 0 {
+ return w
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: rs,
+ Failure: "avoid using bare returns, please add return expressions",
+ })
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/blank-imports.go b/vendor/github.com/mgechev/revive/rule/blank-imports.go
new file mode 100644
index 0000000000..0a00e3707d
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/blank-imports.go
@@ -0,0 +1,74 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// BlankImportsRule flags blank imports outside main and test packages that lack an explanatory comment.
+type BlankImportsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *BlankImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintBlankImports{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *BlankImportsRule) Name() string {
+ return "blank-imports"
+}
+
+type lintBlankImports struct {
+ fileAst *ast.File
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintBlankImports) Visit(_ ast.Node) ast.Visitor {
+ // In package main and in tests, we don't complain about blank imports.
+ if w.file.Pkg.IsMain() || w.file.IsTest() {
+ return nil
+ }
+
+ // The first element of each contiguous group of blank imports should have
+ // an explanatory comment of some kind.
+ for i, imp := range w.fileAst.Imports {
+ pos := w.file.ToPosition(imp.Pos())
+
+ if !isBlank(imp.Name) {
+ continue // Ignore non-blank imports.
+ }
+ if i > 0 {
+ prev := w.fileAst.Imports[i-1]
+ prevPos := w.file.ToPosition(prev.Pos())
+ if isBlank(prev.Name) && prevPos.Line+1 == pos.Line {
+ continue // A subsequent blank in a group.
+ }
+ }
+
+ // This is the first blank import of a group.
+ if imp.Doc == nil && imp.Comment == nil {
+ w.onFailure(lint.Failure{
+ Node: imp,
+ Failure: "a blank import should be only in a main or test package, or have a comment justifying it",
+ Confidence: 1,
+ Category: "imports",
+ })
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
new file mode 100644
index 0000000000..0a4e696c63
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/bool-literal-in-expr.go
@@ -0,0 +1,73 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// BoolLiteralRule warns when logic expressions contain Boolean literals.
+type BoolLiteralRule struct{}
+
+// Apply applies the rule to given file.
+func (r *BoolLiteralRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ astFile := file.AST
+ w := &lintBoolLiteral{astFile, onFailure}
+ ast.Walk(w, astFile)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *BoolLiteralRule) Name() string {
+ return "bool-literal-in-expr"
+}
+
+type lintBoolLiteral struct {
+ file *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintBoolLiteral) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.BinaryExpr:
+ if !isBoolOp(n.Op) {
+ return w
+ }
+
+ lexeme, ok := isExprABooleanLit(n.X)
+ if !ok {
+ lexeme, ok = isExprABooleanLit(n.Y)
+
+ if !ok {
+ return w
+ }
+ }
+
+ isConstant := (n.Op == token.LAND && lexeme == "false") || (n.Op == token.LOR && lexeme == "true")
+
+ if isConstant {
+ w.addFailure(n, "Boolean expression seems to always evaluate to "+lexeme, "logic")
+ } else {
+ w.addFailure(n, "omit Boolean literal in expression", "style")
+ }
+ }
+
+ return w
+}
+
+func (w lintBoolLiteral) addFailure(node ast.Node, msg string, cat string) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: cat,
+ Failure: msg,
+ })
+}
diff --git a/vendor/github.com/mgechev/revive/rule/call-to-gc.go b/vendor/github.com/mgechev/revive/rule/call-to-gc.go
new file mode 100644
index 0000000000..06126611bc
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/call-to-gc.go
@@ -0,0 +1,70 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// CallToGCRule lints calls to the garbage collector.
+type CallToGCRule struct{}
+
+// Apply applies the rule to given file.
+func (r *CallToGCRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ var gcTriggeringFunctions = map[string]map[string]bool{
+ "runtime": map[string]bool{"GC": true},
+ }
+
+ w := lintCallToGC{onFailure, gcTriggeringFunctions}
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *CallToGCRule) Name() string {
+ return "call-to-gc"
+}
+
+type lintCallToGC struct {
+ onFailure func(lint.Failure)
+ gcTriggeringFunctions map[string]map[string]bool
+}
+
+func (w lintCallToGC) Visit(node ast.Node) ast.Visitor {
+ ce, ok := node.(*ast.CallExpr)
+ if !ok {
+ return w // nothing to do, the node is not a call
+ }
+
+ fc, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return nil // nothing to do, the call is not of the form pkg.func(...)
+ }
+
+ id, ok := fc.X.(*ast.Ident)
+
+ if !ok {
+ return nil // in case X is not an id (it should be!)
+ }
+
+ fn := fc.Sel.Name
+ pkg := id.Name
+ if !w.gcTriggeringFunctions[pkg][fn] {
+ return nil // it isn't a call to a GC triggering function
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "bad practice",
+ Failure: "explicit call to the garbage collector",
+ })
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
new file mode 100644
index 0000000000..711aa22897
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/cognitive-complexity.go
@@ -0,0 +1,195 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+ "golang.org/x/tools/go/ast/astutil"
+)
+
+// CognitiveComplexityRule warns on functions whose cognitive complexity exceeds the configured maximum.
+type CognitiveComplexityRule struct{}
+
+// Apply applies the rule to given file.
+func (r *CognitiveComplexityRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ const expectedArgumentsCount = 1
+ if len(arguments) < expectedArgumentsCount {
+ panic(fmt.Sprintf("not enough arguments for cognitive-complexity, expected %d, got %d", expectedArgumentsCount, len(arguments)))
+ }
+ complexity, ok := arguments[0].(int64)
+ if !ok {
+ panic(fmt.Sprintf("invalid argument type for cognitive-complexity, expected int64, got %T", arguments[0]))
+ }
+
+ linter := cognitiveComplexityLinter{
+ file: file,
+ maxComplexity: int(complexity),
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ linter.lint()
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *CognitiveComplexityRule) Name() string {
+ return "cognitive-complexity"
+}
+
+type cognitiveComplexityLinter struct {
+ file *lint.File
+ maxComplexity int
+ onFailure func(lint.Failure)
+}
+
+func (w cognitiveComplexityLinter) lint() {
+ f := w.file
+ for _, decl := range f.AST.Decls {
+ if fn, ok := decl.(*ast.FuncDecl); ok {
+ v := cognitiveComplexityVisitor{}
+ c := v.subTreeComplexity(fn.Body)
+ if c > w.maxComplexity {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Category: "maintenance",
+ Failure: fmt.Sprintf("function %s has cognitive complexity %d (> max enabled %d)", funcName(fn), c, w.maxComplexity),
+ Node: fn,
+ })
+ }
+ }
+ }
+}
+
+type cognitiveComplexityVisitor struct {
+ complexity int
+ nestingLevel int
+}
+
+// subTreeComplexity calculates the cognitive complexity of an AST-subtree.
+func (v cognitiveComplexityVisitor) subTreeComplexity(n ast.Node) int {
+ ast.Walk(&v, n)
+ return v.complexity
+}
+
+// Visit implements the ast.Visitor interface.
+func (v *cognitiveComplexityVisitor) Visit(n ast.Node) ast.Visitor {
+ switch n := n.(type) {
+ case *ast.IfStmt:
+ targets := []ast.Node{n.Cond, n.Body, n.Else}
+ v.walk(1, targets...)
+ return nil
+ case *ast.ForStmt:
+ targets := []ast.Node{n.Cond, n.Body}
+ v.walk(1, targets...)
+ return nil
+ case *ast.RangeStmt:
+ v.walk(1, n.Body)
+ return nil
+ case *ast.SelectStmt:
+ v.walk(1, n.Body)
+ return nil
+ case *ast.SwitchStmt:
+ v.walk(1, n.Body)
+ return nil
+ case *ast.TypeSwitchStmt:
+ v.walk(1, n.Body)
+ return nil
+ case *ast.FuncLit:
+ v.walk(0, n.Body) // do not increment the complexity, just do the nesting
+ return nil
+ case *ast.BinaryExpr:
+ v.complexity += v.binExpComplexity(n)
+ return nil // skip visiting binexp sub-tree (already visited by binExpComplexity)
+ case *ast.BranchStmt:
+ if n.Label != nil {
+ v.complexity += 1
+ }
+ }
+ // TODO handle (at least) direct recursion
+
+ return v
+}
+
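+// walk adds the given increment plus the current nesting level to the total
+// complexity (nested control flow costs more) and visits each target one
+// nesting level deeper.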
+func (v *cognitiveComplexityVisitor) walk(complexityIncrement int, targets ...ast.Node) {
+ v.complexity += complexityIncrement + v.nestingLevel
+ nesting := v.nestingLevel
+ v.nestingLevel++
+
+ for _, t := range targets {
+ if t == nil {
+ continue
+ }
+
+ ast.Walk(v, t)
+ }
+
+ v.nestingLevel = nesting
+}
+
+func (cognitiveComplexityVisitor) binExpComplexity(n *ast.BinaryExpr) int {
+ calculator := binExprComplexityCalculator{opsStack: []token.Token{}}
+
+ astutil.Apply(n, calculator.pre, calculator.post)
+
+ return calculator.complexity
+}
+
+type binExprComplexityCalculator struct {
+ complexity int
+ opsStack []token.Token // stack of bool operators
+ subexpStarted bool
+}
+
+func (becc *binExprComplexityCalculator) pre(c *astutil.Cursor) bool {
+ switch n := c.Node().(type) {
+ case *ast.BinaryExpr:
+ isBoolOp := n.Op == token.LAND || n.Op == token.LOR
+ if !isBoolOp {
+ break
+ }
+
+ ops := len(becc.opsStack)
+ // if
+ // is the first boolop in the expression OR
+ // is the first boolop inside a subexpression (...) OR
+		// is not the same as the previous one
+ // then
+ // increment complexity
+ if ops == 0 || becc.subexpStarted || n.Op != becc.opsStack[ops-1] {
+ becc.complexity++
+ becc.subexpStarted = false
+ }
+
+ becc.opsStack = append(becc.opsStack, n.Op)
+ case *ast.ParenExpr:
+ becc.subexpStarted = true
+ }
+
+ return true
+}
+
+func (becc *binExprComplexityCalculator) post(c *astutil.Cursor) bool {
+ switch n := c.Node().(type) {
+ case *ast.BinaryExpr:
+ isBoolOp := n.Op == token.LAND || n.Op == token.LOR
+ if !isBoolOp {
+ break
+ }
+
+ ops := len(becc.opsStack)
+ if ops > 0 {
+ becc.opsStack = becc.opsStack[:ops-1]
+ }
+ case *ast.ParenExpr:
+ becc.subexpStarted = false
+ }
+
+ return true
+}
diff --git a/vendor/github.com/mgechev/revive/rule/confusing-naming.go b/vendor/github.com/mgechev/revive/rule/confusing-naming.go
new file mode 100644
index 0000000000..143bb18c33
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/confusing-naming.go
@@ -0,0 +1,190 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "strings"
+ "sync"
+
+ "github.com/mgechev/revive/lint"
+)
+
+type referenceMethod struct {
+ fileName string
+ id *ast.Ident
+}
+
+type pkgMethods struct {
+ pkg *lint.Package
+ methods map[string]map[string]*referenceMethod
+ mu *sync.Mutex
+}
+
+type packages struct {
+ pkgs []pkgMethods
+ mu sync.Mutex
+}
+
+func (ps *packages) methodNames(lp *lint.Package) pkgMethods {
+ ps.mu.Lock()
+
+ for _, pkg := range ps.pkgs {
+ if pkg.pkg == lp {
+ ps.mu.Unlock()
+ return pkg
+ }
+ }
+
+ pkgm := pkgMethods{pkg: lp, methods: make(map[string]map[string]*referenceMethod), mu: &sync.Mutex{}}
+ ps.pkgs = append(ps.pkgs, pkgm)
+
+ ps.mu.Unlock()
+ return pkgm
+}
+
+var allPkgs = packages{pkgs: make([]pkgMethods, 1)}
+
+// ConfusingNamingRule lints method names that differ only by capitalization
+type ConfusingNamingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ConfusingNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ fileAst := file.AST
+ pkgm := allPkgs.methodNames(file.Pkg)
+ walker := lintConfusingNames{
+ fileName: file.Name,
+ pkgm: pkgm,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(&walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ConfusingNamingRule) Name() string {
+ return "confusing-naming"
+}
+
+// checkMethodName checks whether a given method/function name differs only by capitalization from another method/function of the same struct/file.
+func checkMethodName(holder string, id *ast.Ident, w *lintConfusingNames) {
+ if id.Name == "init" && holder == defaultStructName {
+ // ignore init functions
+ return
+ }
+
+ pkgm := w.pkgm
+ name := strings.ToUpper(id.Name)
+
+ pkgm.mu.Lock()
+ defer pkgm.mu.Unlock()
+
+ if pkgm.methods[holder] != nil {
+ if pkgm.methods[holder][name] != nil {
+ refMethod := pkgm.methods[holder][name]
+ // confusing names
+ var kind string
+ if holder == defaultStructName {
+ kind = "function"
+ } else {
+ kind = "method"
+ }
+ var fileName string
+ if w.fileName == refMethod.fileName {
+ fileName = "the same source file"
+ } else {
+ fileName = refMethod.fileName
+ }
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("Method '%s' differs only by capitalization to %s '%s' in %s", id.Name, kind, refMethod.id.Name, fileName),
+ Confidence: 1,
+ Node: id,
+ Category: "naming",
+ })
+
+ return
+ }
+ } else {
+ pkgm.methods[holder] = make(map[string]*referenceMethod, 1)
+ }
+
+ // update the black list
+ if pkgm.methods[holder] == nil {
+ println("no entry for '", holder, "'")
+ }
+ pkgm.methods[holder][name] = &referenceMethod{fileName: w.fileName, id: id}
+}
+
+type lintConfusingNames struct {
+ fileName string
+ pkgm pkgMethods
+ onFailure func(lint.Failure)
+}
+
+const defaultStructName = "_" // used to map functions
+
+// getStructName returns the struct name of a function receiver, defaulting to defaultStructName when there is none.
+func getStructName(r *ast.FieldList) string {
+ result := defaultStructName
+
+ if r == nil || len(r.List) < 1 {
+ return result
+ }
+
+ t := r.List[0].Type
+
+ if p, _ := t.(*ast.StarExpr); p != nil { // if a pointer receiver => dereference pointer receiver types
+ t = p.X
+ }
+
+ if p, _ := t.(*ast.Ident); p != nil {
+ result = p.Name
+ }
+
+ return result
+}
+
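+// checkStructFields reports struct fields whose names differ only by
+// capitalization.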
+func checkStructFields(fields *ast.FieldList, structName string, w *lintConfusingNames) {
+ bl := make(map[string]bool, len(fields.List))
+ for _, f := range fields.List {
+ for _, id := range f.Names {
+ normName := strings.ToUpper(id.Name)
+ if bl[normName] {
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("Field '%s' differs only by capitalization to other field in the struct type %s", id.Name, structName),
+ Confidence: 1,
+ Node: id,
+ Category: "naming",
+ })
+ } else {
+ bl[normName] = true
+ }
+ }
+ }
+}
+
+func (w *lintConfusingNames) Visit(n ast.Node) ast.Visitor {
+ switch v := n.(type) {
+ case *ast.FuncDecl:
+ // Exclude naming warnings for functions that are exported to C but
+ // not exported in the Go API.
+ // See https://github.com/golang/lint/issues/144.
+ if ast.IsExported(v.Name.Name) || !isCgoExported(v) {
+ checkMethodName(getStructName(v.Recv), v.Name, w)
+ }
+ case *ast.TypeSpec:
+ if s, ok := v.Type.(*ast.StructType); ok {
+ checkStructFields(s.Fields, v.Name.Name, w)
+ }
+
+ default:
+ // will add other checks like field names, struct names, etc.
+ }
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/confusing-results.go b/vendor/github.com/mgechev/revive/rule/confusing-results.go
new file mode 100644
index 0000000000..1d386b3db5
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/confusing-results.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ConfusingResultsRule warns on functions that return two or more consecutive unnamed results of the same type.
+type ConfusingResultsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ConfusingResultsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintConfusingResults{
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ConfusingResultsRule) Name() string {
+ return "confusing-results"
+}
+
+type lintConfusingResults struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintConfusingResults) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Type.Results == nil || len(fn.Type.Results.List) < 2 {
+ return w
+ }
+ lastType := ""
+ for _, result := range fn.Type.Results.List {
+ if len(result.Names) > 0 {
+ return w
+ }
+
+ t, ok := result.Type.(*ast.Ident)
+ if !ok {
+ return w
+ }
+
+ if t.Name == lastType {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Confidence: 1,
+ Category: "naming",
+ Failure: "unnamed results of the same type may be confusing, consider using named results",
+ })
+ break
+ }
+ lastType = t.Name
+
+ }
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
new file mode 100644
index 0000000000..6a91561111
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/constant-logical-expr.go
@@ -0,0 +1,88 @@
+package rule
+
+import (
+ "github.com/mgechev/revive/lint"
+ "go/ast"
+ "go/token"
+)
+
+// ConstantLogicalExprRule warns on constant logical expressions.
+type ConstantLogicalExprRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ConstantLogicalExprRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ astFile := file.AST
+ w := &lintConstantLogicalExpr{astFile, onFailure}
+ ast.Walk(w, astFile)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ConstantLogicalExprRule) Name() string {
+ return "constant-logical-expr"
+}
+
+type lintConstantLogicalExpr struct {
+ file *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintConstantLogicalExpr) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.BinaryExpr:
+ if !w.isOperatorWithLogicalResult(n.Op) {
+ return w
+ }
+
+ if gofmt(n.X) != gofmt(n.Y) { // check if subexpressions are the same
+ return w
+ }
+
+ if n.Op == token.EQL {
+ w.newFailure(n, "expression always evaluates to true")
+ return w
+ }
+
+ if w.isInequalityOperator(n.Op) {
+ w.newFailure(n, "expression always evaluates to false")
+ return w
+ }
+
+ w.newFailure(n, "left and right hand-side sub-expressions are the same")
+ }
+
+ return w
+}
+
+func (w *lintConstantLogicalExpr) isOperatorWithLogicalResult(t token.Token) bool {
+ switch t {
+ case token.LAND, token.LOR, token.EQL, token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ:
+ return true
+ }
+
+ return false
+}
+
+func (w *lintConstantLogicalExpr) isInequalityOperator(t token.Token) bool {
+ switch t {
+ case token.LSS, token.GTR, token.NEQ, token.LEQ, token.GEQ:
+ return true
+ }
+
+ return false
+}
+
+func (w lintConstantLogicalExpr) newFailure(node ast.Node, msg string) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "logic",
+ Failure: msg,
+ })
+}
diff --git a/vendor/github.com/mgechev/revive/rule/context-as-argument.go b/vendor/github.com/mgechev/revive/rule/context-as-argument.go
new file mode 100644
index 0000000000..0ed28a82a5
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/context-as-argument.go
@@ -0,0 +1,60 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ContextAsArgumentRule suggests that context.Context should be the first parameter of a function.
+type ContextAsArgumentRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ContextAsArgumentRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintContextArguments{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ContextAsArgumentRule) Name() string {
+ return "context-as-argument"
+}
+
+type lintContextArguments struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintContextArguments) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || len(fn.Type.Params.List) <= 1 {
+ return w
+ }
+ // A context.Context should be the first parameter of a function.
+ // Flag any that show up after the first.
+ for _, arg := range fn.Type.Params.List[1:] {
+ if isPkgDot(arg.Type, "context", "Context") {
+ w.onFailure(lint.Failure{
+ Node: fn,
+ Category: "arg-order",
+ Failure: "context.Context should be the first parameter of a function",
+ Confidence: 0.9,
+ })
+ break // only flag one
+ }
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/context-keys-type.go b/vendor/github.com/mgechev/revive/rule/context-keys-type.go
new file mode 100644
index 0000000000..9c2f0bbd7d
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/context-keys-type.go
@@ -0,0 +1,81 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ContextKeysType disallows the usage of basic types as keys in context.WithValue calls.
+type ContextKeysType struct{}
+
+// Apply applies the rule to given file.
+func (r *ContextKeysType) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintContextKeyTypes{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ file.Pkg.TypeCheck()
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ContextKeysType) Name() string {
+ return "context-keys-type"
+}
+
+type lintContextKeyTypes struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintContextKeyTypes) Visit(n ast.Node) ast.Visitor {
+ switch n := n.(type) {
+ case *ast.CallExpr:
+ checkContextKeyType(w, n)
+ }
+
+ return w
+}
+
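+// checkContextKeyType reports a failure when the key argument of a
+// context.WithValue call has a basic (predeclared) type.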
+func checkContextKeyType(w lintContextKeyTypes, x *ast.CallExpr) {
+ f := w.file
+ sel, ok := x.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return
+ }
+ pkg, ok := sel.X.(*ast.Ident)
+ if !ok || pkg.Name != "context" {
+ return
+ }
+ if sel.Sel.Name != "WithValue" {
+ return
+ }
+
+ // key is second argument to context.WithValue
+ if len(x.Args) != 3 {
+ return
+ }
+ key := f.Pkg.TypesInfo.Types[x.Args[1]]
+
+ if ktyp, ok := key.Type.(*types.Basic); ok && ktyp.Kind() != types.Invalid {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: x,
+ Category: "content",
+ Failure: fmt.Sprintf("should not use basic type %s as key in context.WithValue", key.Type),
+ })
+ }
+}
diff --git a/vendor/github.com/mgechev/revive/rule/cyclomatic.go b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
new file mode 100644
index 0000000000..48ea80a6aa
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/cyclomatic.go
@@ -0,0 +1,115 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// Based on https://github.com/fzipp/gocyclo
+
+// CyclomaticRule lints the cyclomatic complexity of functions against a configured maximum.
+type CyclomaticRule struct{}
+
+// Apply applies the rule to given file.
+func (r *CyclomaticRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ complexity, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic("invalid argument for cyclomatic complexity")
+ }
+
+ fileAst := file.AST
+ walker := lintCyclomatic{
+ file: file,
+ complexity: int(complexity),
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *CyclomaticRule) Name() string {
+ return "cyclomatic"
+}
+
+type lintCyclomatic struct {
+ file *lint.File
+ complexity int
+ onFailure func(lint.Failure)
+}
+
+func (w lintCyclomatic) Visit(_ ast.Node) ast.Visitor {
+ f := w.file
+ for _, decl := range f.AST.Decls {
+ if fn, ok := decl.(*ast.FuncDecl); ok {
+ c := complexity(fn)
+ if c > w.complexity {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Category: "maintenance",
+ Failure: fmt.Sprintf("function %s has cyclomatic complexity %d", funcName(fn), c),
+ Node: fn,
+ })
+ }
+ }
+ }
+ return nil
+}
+
+// funcName returns the name representation of a function or method:
+// "(Type).Name" for methods or simply "Name" for functions.
+func funcName(fn *ast.FuncDecl) string {
+ if fn.Recv != nil {
+ if fn.Recv.NumFields() > 0 {
+ typ := fn.Recv.List[0].Type
+ return fmt.Sprintf("(%s).%s", recvString(typ), fn.Name)
+ }
+ }
+ return fn.Name.Name
+}
+
+// recvString returns a string representation of recv of the
+// form "T", "*T", or "BADRECV" (if not a proper receiver type).
+func recvString(recv ast.Expr) string {
+ switch t := recv.(type) {
+ case *ast.Ident:
+ return t.Name
+ case *ast.StarExpr:
+ return "*" + recvString(t.X)
+ }
+ return "BADRECV"
+}
+
+// complexity calculates the cyclomatic complexity of a function.
+func complexity(fn *ast.FuncDecl) int {
+ v := complexityVisitor{}
+ ast.Walk(&v, fn)
+ return v.Complexity
+}
+
+type complexityVisitor struct {
+ // Complexity is the cyclomatic complexity
+ Complexity int
+}
+
+// Visit implements the ast.Visitor interface.
+func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor {
+ switch n := n.(type) {
+ case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause:
+ v.Complexity++
+ case *ast.BinaryExpr:
+ if n.Op == token.LAND || n.Op == token.LOR {
+ v.Complexity++
+ }
+ }
+ return v
+}
diff --git a/vendor/github.com/mgechev/revive/rule/deep-exit.go b/vendor/github.com/mgechev/revive/rule/deep-exit.go
new file mode 100644
index 0000000000..f49e93dd47
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/deep-exit.go
@@ -0,0 +1,94 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// DeepExitRule lints program exit at functions other than main or init.
+type DeepExitRule struct{}
+
+// Apply applies the rule to given file.
+func (r *DeepExitRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ var exitFunctions = map[string]map[string]bool{
+ "os": map[string]bool{"Exit": true},
+ "syscall": map[string]bool{"Exit": true},
+ "log": map[string]bool{
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ },
+ }
+
+ w := lintDeepExit{onFailure, exitFunctions, file.IsTest()}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *DeepExitRule) Name() string {
+ return "deep-exit"
+}
+
+type lintDeepExit struct {
+ onFailure func(lint.Failure)
+ exitFunctions map[string]map[string]bool
+ isTestFile bool
+}
+
+func (w lintDeepExit) Visit(node ast.Node) ast.Visitor {
+ if fd, ok := node.(*ast.FuncDecl); ok {
+ if w.mustIgnore(fd) {
+ return nil // skip analysis of this function
+ }
+
+ return w
+ }
+
+ se, ok := node.(*ast.ExprStmt)
+ if !ok {
+ return w
+ }
+ ce, ok := se.X.(*ast.CallExpr)
+ if !ok {
+ return w
+ }
+
+ fc, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ return w
+ }
+ id, ok := fc.X.(*ast.Ident)
+ if !ok {
+ return w
+ }
+
+ fn := fc.Sel.Name
+ pkg := id.Name
+ if w.exitFunctions[pkg] != nil && w.exitFunctions[pkg][fn] { // it's a call to an exit function
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: ce,
+ Category: "bad practice",
+ Failure: fmt.Sprintf("calls to %s.%s only in main() or init() functions", pkg, fn),
+ })
+ }
+
+ return w
+}
+
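+// mustIgnore reports whether the function declaration should be skipped:
+// init, main and, in test files, TestMain are allowed to exit.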
+func (w *lintDeepExit) mustIgnore(fd *ast.FuncDecl) bool {
+ fn := fd.Name.Name
+
+ return fn == "init" || fn == "main" || (w.isTestFile && fn == "TestMain")
+}
diff --git a/vendor/github.com/mgechev/revive/rule/dot-imports.go b/vendor/github.com/mgechev/revive/rule/dot-imports.go
new file mode 100644
index 0000000000..78419d7d6a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/dot-imports.go
@@ -0,0 +1,54 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// DotImportsRule warns on dot imports outside of test files.
+type DotImportsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *DotImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintImports{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *DotImportsRule) Name() string {
+ return "dot-imports"
+}
+
+type lintImports struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintImports) Visit(_ ast.Node) ast.Visitor {
+ for i, is := range w.fileAst.Imports {
+ _ = i
+ if is.Name != nil && is.Name.Name == "." && !w.file.IsTest() {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: "should not use dot imports",
+ Node: is,
+ Category: "imports",
+ })
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/duplicated-imports.go b/vendor/github.com/mgechev/revive/rule/duplicated-imports.go
new file mode 100644
index 0000000000..485b6a2ead
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/duplicated-imports.go
@@ -0,0 +1,39 @@
+package rule
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// DuplicatedImportsRule warns on multiple imports of the same package.
+type DuplicatedImportsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *DuplicatedImportsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ impPaths := map[string]struct{}{}
+ for _, imp := range file.AST.Imports {
+ path := imp.Path.Value
+ _, ok := impPaths[path]
+ if ok {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("Package %s already imported", path),
+ Node: imp,
+ Category: "imports",
+ })
+ continue
+ }
+
+ impPaths[path] = struct{}{}
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *DuplicatedImportsRule) Name() string {
+ return "duplicated-imports"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/empty-block.go b/vendor/github.com/mgechev/revive/rule/empty-block.go
new file mode 100644
index 0000000000..7861394b32
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/empty-block.go
@@ -0,0 +1,76 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// EmptyBlockRule warns on empty code blocks.
+type EmptyBlockRule struct{}
+
+// Apply applies the rule to given file.
+func (r *EmptyBlockRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintEmptyBlock{make([]*ast.BlockStmt, 0), onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *EmptyBlockRule) Name() string {
+ return "empty-block"
+}
+
+type lintEmptyBlock struct {
+ ignore []*ast.BlockStmt
+ onFailure func(lint.Failure)
+}
+
+func (w lintEmptyBlock) Visit(node ast.Node) ast.Visitor {
+ fd, ok := node.(*ast.FuncDecl)
+ if ok {
+ w.ignore = append(w.ignore, fd.Body)
+ return w
+ }
+
+ fl, ok := node.(*ast.FuncLit)
+ if ok {
+ w.ignore = append(w.ignore, fl.Body)
+ return w
+ }
+
+ block, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return w
+ }
+
+ if mustIgnore(block, w.ignore) {
+ return w
+ }
+
+ if len(block.List) == 0 {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: block,
+ Category: "logic",
+ Failure: "this block is empty, you can remove it",
+ })
+ }
+
+ return w
+}
+
+func mustIgnore(block *ast.BlockStmt, blackList []*ast.BlockStmt) bool {
+ for _, b := range blackList {
+ if b == block {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/mgechev/revive/rule/empty-lines.go b/vendor/github.com/mgechev/revive/rule/empty-lines.go
new file mode 100644
index 0000000000..61d9281bfc
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/empty-lines.go
@@ -0,0 +1,113 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// EmptyLinesRule lints empty lines in blocks.
+type EmptyLinesRule struct{}
+
+// Apply applies the rule to given file.
+func (r *EmptyLinesRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintEmptyLines{file, file.CommentMap(), onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *EmptyLinesRule) Name() string {
+ return "empty-lines"
+}
+
+type lintEmptyLines struct {
+ file *lint.File
+ cmap ast.CommentMap
+ onFailure func(lint.Failure)
+}
+
+func (w lintEmptyLines) Visit(node ast.Node) ast.Visitor {
+ block, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return w
+ }
+
+ w.checkStart(block)
+ w.checkEnd(block)
+
+ return w
+}
+
+func (w lintEmptyLines) checkStart(block *ast.BlockStmt) {
+ if len(block.List) == 0 {
+ return
+ }
+
+ start := w.position(block.Lbrace)
+ firstNode := block.List[0]
+
+ if w.commentBetween(start, firstNode) {
+ return
+ }
+
+ first := w.position(firstNode.Pos())
+ if first.Line-start.Line > 1 {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: block,
+ Category: "style",
+ Failure: "extra empty line at the start of a block",
+ })
+ }
+}
+
+func (w lintEmptyLines) checkEnd(block *ast.BlockStmt) {
+ if len(block.List) < 1 {
+ return
+ }
+
+ end := w.position(block.Rbrace)
+ lastNode := block.List[len(block.List)-1]
+
+ if w.commentBetween(end, lastNode) {
+ return
+ }
+
+ last := w.position(lastNode.End())
+ if end.Line-last.Line > 1 {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: lastNode,
+ Category: "style",
+ Failure: "extra empty line at the end of a block",
+ })
+ }
+}
+
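+// commentBetween reports whether a comment attached to node lies on the line
+// immediately adjacent to the given position.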
+func (w lintEmptyLines) commentBetween(position token.Position, node ast.Node) bool {
+ comments := w.cmap.Filter(node).Comments()
+ if len(comments) == 0 {
+ return false
+ }
+
+ for _, comment := range comments {
+ start, end := w.position(comment.Pos()), w.position(comment.End())
+ if start.Line-position.Line == 1 || position.Line-end.Line == 1 {
+ return true
+ }
+ }
+
+ return false
+}
+
+func (w lintEmptyLines) position(pos token.Pos) token.Position {
+ return w.file.ToPosition(pos)
+}
diff --git a/vendor/github.com/mgechev/revive/rule/error-naming.go b/vendor/github.com/mgechev/revive/rule/error-naming.go
new file mode 100644
index 0000000000..3a1080625e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/error-naming.go
@@ -0,0 +1,79 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ErrorNamingRule lints the naming of error variables: their names should start with Err or err.
+type ErrorNamingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ErrorNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintErrors{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ErrorNamingRule) Name() string {
+ return "error-naming"
+}
+
+type lintErrors struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintErrors) Visit(_ ast.Node) ast.Visitor {
+ for _, decl := range w.fileAst.Decls {
+ gd, ok := decl.(*ast.GenDecl)
+ if !ok || gd.Tok != token.VAR {
+ continue
+ }
+ for _, spec := range gd.Specs {
+ spec := spec.(*ast.ValueSpec)
+ if len(spec.Names) != 1 || len(spec.Values) != 1 {
+ continue
+ }
+ ce, ok := spec.Values[0].(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
+ continue
+ }
+
+ id := spec.Names[0]
+ prefix := "err"
+ if id.IsExported() {
+ prefix = "Err"
+ }
+ if !strings.HasPrefix(id.Name, prefix) {
+ w.onFailure(lint.Failure{
+ Node: id,
+ Confidence: 0.9,
+ Category: "naming",
+ Failure: fmt.Sprintf("error var %s should have name of the form %sFoo", id.Name, prefix),
+ })
+ }
+ }
+ }
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/error-return.go b/vendor/github.com/mgechev/revive/rule/error-return.go
new file mode 100644
index 0000000000..737d8c66f7
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/error-return.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ErrorReturnRule warns on functions whose error result is not the last return value.
+type ErrorReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ErrorReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintErrorReturn{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ErrorReturnRule) Name() string {
+ return "error-return"
+}
+
+type lintErrorReturn struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintErrorReturn) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Type.Results == nil {
+ return w
+ }
+ ret := fn.Type.Results.List
+ if len(ret) <= 1 {
+ return w
+ }
+ if isIdent(ret[len(ret)-1].Type, "error") {
+ return nil
+ }
+ // An error return parameter should be the last parameter.
+ // Flag any error parameters found before the last.
+ for _, r := range ret[:len(ret)-1] {
+ if isIdent(r.Type, "error") {
+ w.onFailure(lint.Failure{
+ Category: "arg-order",
+ Confidence: 0.9,
+ Node: fn,
+ Failure: "error should be the last type when returning multiple items",
+ })
+ break // only flag one
+ }
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/error-strings.go b/vendor/github.com/mgechev/revive/rule/error-strings.go
new file mode 100644
index 0000000000..b8a5b7ed7a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/error-strings.go
@@ -0,0 +1,98 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+ "strconv"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ErrorStringsRule lints error strings: they should not be capitalized nor end with punctuation or a newline.
+type ErrorStringsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ErrorStringsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintErrorStrings{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ErrorStringsRule) Name() string {
+ return "error-strings"
+}
+
+type lintErrorStrings struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintErrorStrings) Visit(n ast.Node) ast.Visitor {
+ ce, ok := n.(*ast.CallExpr)
+ if !ok {
+ return w
+ }
+ if !isPkgDot(ce.Fun, "errors", "New") && !isPkgDot(ce.Fun, "fmt", "Errorf") {
+ return w
+ }
+ if len(ce.Args) < 1 {
+ return w
+ }
+ str, ok := ce.Args[0].(*ast.BasicLit)
+ if !ok || str.Kind != token.STRING {
+ return w
+ }
+ s, _ := strconv.Unquote(str.Value) // can assume well-formed Go
+ if s == "" {
+ return w
+ }
+ clean, conf := lintErrorString(s)
+ if clean {
+ return w
+ }
+
+ w.onFailure(lint.Failure{
+ Node: str,
+ Confidence: conf,
+ Category: "errors",
+ Failure: "error strings should not be capitalized or end with punctuation or a newline",
+ })
+ return w
+}
+
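+// lintErrorString reports whether s looks like a clean error string and, when
+// it does not, the confidence with which to report the failure.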
+func lintErrorString(s string) (isClean bool, conf float64) {
+ const basicConfidence = 0.8
+ const capConfidence = basicConfidence - 0.2
+ first, firstN := utf8.DecodeRuneInString(s)
+ last, _ := utf8.DecodeLastRuneInString(s)
+ if last == '.' || last == ':' || last == '!' || last == '\n' {
+ return false, basicConfidence
+ }
+ if unicode.IsUpper(first) {
+ // People use proper nouns and exported Go identifiers in error strings,
+ // so decrease the confidence of warnings for capitalization.
+ if len(s) <= firstN {
+ return false, capConfidence
+ }
+ // Flag strings starting with something that doesn't look like an initialism.
+ if second, _ := utf8.DecodeRuneInString(s[firstN:]); !unicode.IsUpper(second) {
+ return false, capConfidence
+ }
+ }
+ return true, 0
+}
diff --git a/vendor/github.com/mgechev/revive/rule/errorf.go b/vendor/github.com/mgechev/revive/rule/errorf.go
new file mode 100644
index 0000000000..1bffbab5bc
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/errorf.go
@@ -0,0 +1,93 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "regexp"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ErrorfRule suggests fmt.Errorf over errors.New(fmt.Sprintf(...)) and t.Errorf over t.Error(fmt.Sprintf(...)).
+type ErrorfRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ErrorfRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintErrorf{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ file.Pkg.TypeCheck()
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ErrorfRule) Name() string {
+ return "errorf"
+}
+
+type lintErrorf struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintErrorf) Visit(n ast.Node) ast.Visitor {
+ ce, ok := n.(*ast.CallExpr)
+ if !ok || len(ce.Args) != 1 {
+ return w
+ }
+ isErrorsNew := isPkgDot(ce.Fun, "errors", "New")
+ var isTestingError bool
+ se, ok := ce.Fun.(*ast.SelectorExpr)
+ if ok && se.Sel.Name == "Error" {
+ if typ := w.file.Pkg.TypeOf(se.X); typ != nil {
+ isTestingError = typ.String() == "*testing.T"
+ }
+ }
+ if !isErrorsNew && !isTestingError {
+ return w
+ }
+ arg := ce.Args[0]
+ ce, ok = arg.(*ast.CallExpr)
+ if !ok || !isPkgDot(ce.Fun, "fmt", "Sprintf") {
+ return w
+ }
+ errorfPrefix := "fmt"
+ if isTestingError {
+ errorfPrefix = w.file.Render(se.X)
+ }
+
+ failure := lint.Failure{
+ Category: "errors",
+ Node: n,
+ Confidence: 1,
+ Failure: fmt.Sprintf("should replace %s(fmt.Sprintf(...)) with %s.Errorf(...)", w.file.Render(se), errorfPrefix),
+ }
+
+ m := srcLineWithMatch(w.file, ce, `^(.*)`+w.file.Render(se)+`\(fmt\.Sprintf\((.*)\)\)(.*)$`)
+ if m != nil {
+ failure.ReplacementLine = m[1] + errorfPrefix + ".Errorf(" + m[2] + ")" + m[3]
+ }
+
+ w.onFailure(failure)
+
+ return w
+}
+
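+// srcLineWithMatch matches pattern against the source line containing node and
+// returns the submatches, if any.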
+func srcLineWithMatch(file *lint.File, node ast.Node, pattern string) (m []string) {
+ line := srcLine(file.Content(), file.ToPosition(node.Pos()))
+ line = strings.TrimSuffix(line, "\n")
+ rx := regexp.MustCompile(pattern)
+ return rx.FindStringSubmatch(line)
+}
diff --git a/vendor/github.com/mgechev/revive/rule/exported.go b/vendor/github.com/mgechev/revive/rule/exported.go
new file mode 100644
index 0000000000..b68f2bacc1
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/exported.go
@@ -0,0 +1,272 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+ "unicode"
+ "unicode/utf8"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ExportedRule lints the documentation comments and naming of exported symbols.
+type ExportedRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ExportedRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ if isTest(file) {
+ return failures
+ }
+
+ fileAst := file.AST
+ walker := lintExported{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ genDeclMissingComments: make(map[*ast.GenDecl]bool),
+ }
+
+ ast.Walk(&walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ExportedRule) Name() string {
+ return "exported"
+}
+
+type lintExported struct {
+ file *lint.File
+ fileAst *ast.File
+ lastGen *ast.GenDecl
+ genDeclMissingComments map[*ast.GenDecl]bool
+ onFailure func(lint.Failure)
+}
+
+func (w *lintExported) lintFuncDoc(fn *ast.FuncDecl) {
+ if !ast.IsExported(fn.Name.Name) {
+ // func is unexported
+ return
+ }
+ kind := "function"
+ name := fn.Name.Name
+ if fn.Recv != nil && len(fn.Recv.List) > 0 {
+ // method
+ kind = "method"
+ recv := receiverType(fn)
+ if !ast.IsExported(recv) {
+ // receiver is unexported
+ return
+ }
+ if commonMethods[name] {
+ return
+ }
+ switch name {
+ case "Len", "Less", "Swap":
+ if w.file.Pkg.Sortable[recv] {
+ return
+ }
+ }
+ name = recv + "." + name
+ }
+ if fn.Doc == nil {
+ w.onFailure(lint.Failure{
+ Node: fn,
+ Confidence: 1,
+ Category: "comments",
+ Failure: fmt.Sprintf("exported %s %s should have comment or be unexported", kind, name),
+ })
+ return
+ }
+ s := normalizeText(fn.Doc.Text())
+ prefix := fn.Name.Name + " "
+ if !strings.HasPrefix(s, prefix) {
+ w.onFailure(lint.Failure{
+ Node: fn.Doc,
+ Confidence: 0.8,
+ Category: "comments",
+ Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix),
+ })
+ }
+}
+
+func (w *lintExported) checkStutter(id *ast.Ident, thing string) {
+ pkg, name := w.fileAst.Name.Name, id.Name
+ if !ast.IsExported(name) {
+ // unexported name
+ return
+ }
+ // A name stutters if the package name is a strict prefix
+ // and the next character of the name starts a new word.
+ if len(name) <= len(pkg) {
+ // name is too short to stutter.
+ // This permits the name to be the same as the package name.
+ return
+ }
+ if !strings.EqualFold(pkg, name[:len(pkg)]) {
+ return
+ }
+ // We can assume the name is well-formed UTF-8.
+ // If the next rune after the package name is uppercase or an underscore
+	// then it's starting a new word and thus this name stutters.
+ rem := name[len(pkg):]
+ if next, _ := utf8.DecodeRuneInString(rem); next == '_' || unicode.IsUpper(next) {
+ w.onFailure(lint.Failure{
+ Node: id,
+ Confidence: 0.8,
+ Category: "naming",
+ Failure: fmt.Sprintf("%s name will be used as %s.%s by other packages, and that stutters; consider calling this %s", thing, pkg, name, rem),
+ })
+ }
+}
+
+func (w *lintExported) lintTypeDoc(t *ast.TypeSpec, doc *ast.CommentGroup) {
+ if !ast.IsExported(t.Name.Name) {
+ return
+ }
+ if doc == nil {
+ w.onFailure(lint.Failure{
+ Node: t,
+ Confidence: 1,
+ Category: "comments",
+ Failure: fmt.Sprintf("exported type %v should have comment or be unexported", t.Name),
+ })
+ return
+ }
+
+ s := normalizeText(doc.Text())
+ articles := [...]string{"A", "An", "The", "This"}
+ for _, a := range articles {
+ if t.Name.Name == a {
+ continue
+ }
+ if strings.HasPrefix(s, a+" ") {
+ s = s[len(a)+1:]
+ break
+ }
+ }
+ if !strings.HasPrefix(s, t.Name.Name+" ") {
+ w.onFailure(lint.Failure{
+ Node: doc,
+ Confidence: 1,
+ Category: "comments",
+ Failure: fmt.Sprintf(`comment on exported type %v should be of the form "%v ..." (with optional leading article)`, t.Name, t.Name),
+ })
+ }
+}
+
+func (w *lintExported) lintValueSpecDoc(vs *ast.ValueSpec, gd *ast.GenDecl, genDeclMissingComments map[*ast.GenDecl]bool) {
+ kind := "var"
+ if gd.Tok == token.CONST {
+ kind = "const"
+ }
+
+ if len(vs.Names) > 1 {
+ // Check that none are exported except for the first.
+ for _, n := range vs.Names[1:] {
+ if ast.IsExported(n.Name) {
+ w.onFailure(lint.Failure{
+ Category: "comments",
+ Confidence: 1,
+ Failure: fmt.Sprintf("exported %s %s should have its own declaration", kind, n.Name),
+ Node: vs,
+ })
+ return
+ }
+ }
+ }
+
+ // Only one name.
+ name := vs.Names[0].Name
+ if !ast.IsExported(name) {
+ return
+ }
+
+ if vs.Doc == nil && gd.Doc == nil {
+ if genDeclMissingComments[gd] {
+ return
+ }
+ block := ""
+ if kind == "const" && gd.Lparen.IsValid() {
+ block = " (or a comment on this block)"
+ }
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: vs,
+ Category: "comments",
+ Failure: fmt.Sprintf("exported %s %s should have comment%s or be unexported", kind, name, block),
+ })
+ genDeclMissingComments[gd] = true
+ return
+ }
+ // If this GenDecl has parens and a comment, we don't check its comment form.
+ if gd.Lparen.IsValid() && gd.Doc != nil {
+ return
+ }
+ // The relevant text to check will be on either vs.Doc or gd.Doc.
+ // Use vs.Doc preferentially.
+ doc := vs.Doc
+ if doc == nil {
+ doc = gd.Doc
+ }
+ prefix := name + " "
+ s := normalizeText(doc.Text())
+ if !strings.HasPrefix(s, prefix) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: doc,
+ Category: "comments",
+ Failure: fmt.Sprintf(`comment on exported %s %s should be of the form "%s..."`, kind, name, prefix),
+ })
+ }
+}
+
+// normalizeText is a helper function that normalizes comment strings by:
+// * removing one leading space
+//
+// This function is needed because ast.CommentGroup.Text() does not handle //-style and /*-style comments uniformly
+func normalizeText(t string) string {
+ return strings.TrimPrefix(t, " ")
+}
+
+func (w *lintExported) Visit(n ast.Node) ast.Visitor {
+ switch v := n.(type) {
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return nil
+ }
+ // token.CONST, token.TYPE or token.VAR
+ w.lastGen = v
+ return w
+ case *ast.FuncDecl:
+ w.lintFuncDoc(v)
+ if v.Recv == nil {
+ // Only check for stutter on functions, not methods.
+ // Method names are not used package-qualified.
+ w.checkStutter(v.Name, "func")
+ }
+ // Don't proceed inside funcs.
+ return nil
+ case *ast.TypeSpec:
+ // inside a GenDecl, which usually has the doc
+ doc := v.Doc
+ if doc == nil {
+ doc = w.lastGen.Doc
+ }
+ w.lintTypeDoc(v, doc)
+ w.checkStutter(v.Name, "type")
+ // Don't proceed inside types.
+ return nil
+ case *ast.ValueSpec:
+ w.lintValueSpecDoc(v, w.lastGen, w.genDeclMissingComments)
+ return nil
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/file-header.go b/vendor/github.com/mgechev/revive/rule/file-header.go
new file mode 100644
index 0000000000..6df974e91a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/file-header.go
@@ -0,0 +1,69 @@
+package rule
+
+import (
+ "regexp"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FileHeaderRule checks that the file has a header comment matching the configured pattern.
+type FileHeaderRule struct{}
+
+var (
+ multiRegexp = regexp.MustCompile("^/\\*")
+ singleRegexp = regexp.MustCompile("^//")
+)
+
+// Apply applies the rule to given file.
+func (r *FileHeaderRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ if len(arguments) != 1 {
+ panic(`invalid configuration for "file-header" rule`)
+ }
+
+ header, ok := arguments[0].(string)
+ if !ok {
+ panic(`invalid argument for "file-header" rule: first argument should be a string`)
+ }
+
+ failure := []lint.Failure{
+ {
+ Node: file.AST,
+ Confidence: 1,
+ Failure: "the file doesn't have an appropriate header",
+ },
+ }
+
+ if len(file.AST.Comments) == 0 {
+ return failure
+ }
+
+ g := file.AST.Comments[0]
+ if g == nil {
+ return failure
+ }
+ comment := ""
+ for _, c := range g.List {
+ text := c.Text
+ if multiRegexp.Match([]byte(text)) {
+ text = text[2 : len(text)-2]
+ } else if singleRegexp.Match([]byte(text)) {
+ text = text[2:]
+ }
+ comment += text
+ }
+
+ regex, err := regexp.Compile(header)
+ if err != nil {
+ panic(err.Error())
+ }
+
+ if !regex.Match([]byte(comment)) {
+ return failure
+ }
+ return nil
+}
+
+// Name returns the rule name.
+func (r *FileHeaderRule) Name() string {
+ return "file-header"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/flag-param.go b/vendor/github.com/mgechev/revive/rule/flag-param.go
new file mode 100644
index 0000000000..6cb6daea9b
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/flag-param.go
@@ -0,0 +1,104 @@
+package rule
+
+import (
+ "fmt"
+ "github.com/mgechev/revive/lint"
+ "go/ast"
+)
+
+// FlagParamRule warns on boolean parameters used as control flags.
+type FlagParamRule struct{}
+
+// Apply applies the rule to given file.
+func (r *FlagParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintFlagParamRule{onFailure: onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *FlagParamRule) Name() string {
+ return "flag-parameter"
+}
+
+type lintFlagParamRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintFlagParamRule) Visit(node ast.Node) ast.Visitor {
+ fd, ok := node.(*ast.FuncDecl)
+ if !ok {
+ return w
+ }
+
+ if fd.Body == nil {
+ return nil // skip whole function declaration
+ }
+
+ for _, p := range fd.Type.Params.List {
+ t := p.Type
+
+ id, ok := t.(*ast.Ident)
+ if !ok {
+ continue
+ }
+
+ if id.Name != "bool" {
+ continue
+ }
+
+ cv := conditionVisitor{p.Names, fd, w}
+ ast.Walk(cv, fd.Body)
+ }
+
+ return w
+}
+
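+// conditionVisitor inspects if-statement conditions inside fd, looking for
+// uses of the boolean parameters listed in ids.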
+type conditionVisitor struct {
+ ids []*ast.Ident
+ fd *ast.FuncDecl
+ linter lintFlagParamRule
+}
+
+func (w conditionVisitor) Visit(node ast.Node) ast.Visitor {
+ ifStmt, ok := node.(*ast.IfStmt)
+ if !ok {
+ return w
+ }
+
+ fselect := func(n ast.Node) bool {
+ ident, ok := n.(*ast.Ident)
+ if !ok {
+ return false
+ }
+
+ for _, id := range w.ids {
+ if ident.Name == id.Name {
+ return true
+ }
+ }
+
+ return false
+ }
+
+ uses := pick(ifStmt.Cond, fselect, nil)
+
+ if len(uses) < 1 {
+ return w
+ }
+
+ w.linter.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: w.fd.Type.Params,
+ Category: "bad practice",
+ Failure: fmt.Sprintf("parameter '%s' seems to be a control flag, avoid control coupling", uses[0]),
+ })
+
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/function-result-limit.go b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
new file mode 100644
index 0000000000..1850fc4194
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/function-result-limit.go
@@ -0,0 +1,68 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// FunctionResultsLimitRule lints the number of function return values against a configured maximum.
+type FunctionResultsLimitRule struct{}
+
+// Apply applies the rule to given file.
+func (r *FunctionResultsLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ if len(arguments) != 1 {
+ panic(`invalid configuration for "function-result-limit"`)
+ }
+
+ max, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(fmt.Sprintf(`invalid value passed as return results number to the "function-result-limit" rule; need int64 but got %T`, arguments[0]))
+ }
+ if max < 0 {
+ panic(`the value passed as return results number to the "function-result-limit" rule cannot be negative`)
+ }
+
+ var failures []lint.Failure
+
+ walker := lintFunctionResultsNum{
+ max: int(max),
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *FunctionResultsLimitRule) Name() string {
+ return "function-result-limit"
+}
+
+type lintFunctionResultsNum struct {
+ max int
+ onFailure func(lint.Failure)
+}
+
+func (w lintFunctionResultsNum) Visit(n ast.Node) ast.Visitor {
+ node, ok := n.(*ast.FuncDecl)
+ if ok {
+ num := 0
+ if node.Type.Results != nil {
+ num = node.Type.Results.NumFields()
+ }
+ if num > w.max {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("maximum number of return results per function exceeded; max %d but got %d", w.max, num),
+ Node: node.Type,
+ })
+ return w
+ }
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/get-return.go b/vendor/github.com/mgechev/revive/rule/get-return.go
new file mode 100644
index 0000000000..494ab6669d
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/get-return.go
@@ -0,0 +1,70 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// GetReturnRule warns on getters (functions prefixed with Get) that do not return any result.
+type GetReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (r *GetReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintReturnRule{onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *GetReturnRule) Name() string {
+ return "get-return"
+}
+
+type lintReturnRule struct {
+ onFailure func(lint.Failure)
+}
+
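+// isGetter reports whether name begins with "get" (case-insensitive) followed
+// by at least one character that is not a lowercase letter.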
+func isGetter(name string) bool {
+ if strings.HasPrefix(strings.ToUpper(name), "GET") {
+ if len(name) > 3 {
+ c := name[3]
+ return !(c >= 'a' && c <= 'z')
+ }
+ }
+
+ return false
+}
+
+func hasResults(rs *ast.FieldList) bool {
+ return rs != nil && len(rs.List) > 0
+}
+
+func (w lintReturnRule) Visit(node ast.Node) ast.Visitor {
+ fd, ok := node.(*ast.FuncDecl)
+ if !ok {
+ return w
+ }
+
+ if !isGetter(fd.Name.Name) {
+ return w
+ }
+ if !hasResults(fd.Type.Results) {
+ w.onFailure(lint.Failure{
+ Confidence: 0.8,
+ Node: fd,
+ Category: "logic",
+ Failure: fmt.Sprintf("function '%s' seems to be a getter but it does not return any result", fd.Name.Name),
+ })
+ }
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/if-return.go b/vendor/github.com/mgechev/revive/rule/if-return.go
new file mode 100644
index 0000000000..c275d27662
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/if-return.go
@@ -0,0 +1,115 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// IfReturnRule warns on redundant `if x != nil { return x }` blocks followed by `return nil`.
+type IfReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (r *IfReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ astFile := file.AST
+ w := &lintElseError{astFile, onFailure}
+ ast.Walk(w, astFile)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *IfReturnRule) Name() string {
+ return "if-return"
+}
+
+type lintElseError struct {
+ file *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintElseError) Visit(node ast.Node) ast.Visitor {
+ switch v := node.(type) {
+ case *ast.BlockStmt:
+ for i := 0; i < len(v.List)-1; i++ {
+ // if var := whatever; var != nil { return var }
+ s, ok := v.List[i].(*ast.IfStmt)
+ if !ok || s.Body == nil || len(s.Body.List) != 1 || s.Else != nil {
+ continue
+ }
+ assign, ok := s.Init.(*ast.AssignStmt)
+ if !ok || len(assign.Lhs) != 1 || !(assign.Tok == token.DEFINE || assign.Tok == token.ASSIGN) {
+ continue
+ }
+ id, ok := assign.Lhs[0].(*ast.Ident)
+ if !ok {
+ continue
+ }
+ expr, ok := s.Cond.(*ast.BinaryExpr)
+ if !ok || expr.Op != token.NEQ {
+ continue
+ }
+ if lhs, ok := expr.X.(*ast.Ident); !ok || lhs.Name != id.Name {
+ continue
+ }
+ if rhs, ok := expr.Y.(*ast.Ident); !ok || rhs.Name != "nil" {
+ continue
+ }
+ r, ok := s.Body.List[0].(*ast.ReturnStmt)
+ if !ok || len(r.Results) != 1 {
+ continue
+ }
+ if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != id.Name {
+ continue
+ }
+
+ // return nil
+ r, ok = v.List[i+1].(*ast.ReturnStmt)
+ if !ok || len(r.Results) != 1 {
+ continue
+ }
+ if r, ok := r.Results[0].(*ast.Ident); !ok || r.Name != "nil" {
+ continue
+ }
+
+ // check if there are any comments explaining the construct, don't emit an error if there are some.
+ if containsComments(s.Pos(), r.Pos(), w.file) {
+ continue
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: .9,
+ Node: v.List[i],
+ Failure: "redundant if ...; err != nil check, just return error instead.",
+ })
+ }
+ }
+ return w
+}
+
+func containsComments(start, end token.Pos, f *ast.File) bool {
+ for _, cgroup := range f.Comments {
+ comments := cgroup.List
+ if comments[0].Slash >= end {
+ // All comments starting with this group are after end pos.
+ return false
+ }
+ if comments[len(comments)-1].Slash < start {
+ // Comments group ends before start pos.
+ continue
+ }
+ for _, c := range comments {
+ if start <= c.Slash && c.Slash < end && !strings.HasPrefix(c.Text, "// MATCH ") {
+ return true
+ }
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/mgechev/revive/rule/import-shadowing.go b/vendor/github.com/mgechev/revive/rule/import-shadowing.go
new file mode 100644
index 0000000000..b78234c592
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/import-shadowing.go
@@ -0,0 +1,102 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ImportShadowingRule warns on identifiers that shadow an import name.
+type ImportShadowingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ImportShadowingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ importNames := map[string]struct{}{}
+ for _, imp := range file.AST.Imports {
+ importNames[getName(imp)] = struct{}{}
+ }
+
+ fileAst := file.AST
+ walker := importShadowing{
+ importNames: importNames,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ alreadySeen: map[*ast.Object]struct{}{},
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ImportShadowingRule) Name() string {
+ return "import-shadowing"
+}
+
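+// getName returns the effective name of an import: its alias when one is
+// declared, otherwise the last element of its import path.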
+func getName(imp *ast.ImportSpec) string {
+ const pathSep = "/"
+ const strDelim = `"`
+ if imp.Name != nil {
+ return imp.Name.Name
+ }
+
+ path := imp.Path.Value
+ i := strings.LastIndex(path, pathSep)
+ if i == -1 {
+ return strings.Trim(path, strDelim)
+ }
+
+ return strings.Trim(path[i+1:], strDelim)
+}
+
+type importShadowing struct {
+ importNames map[string]struct{}
+ onFailure func(lint.Failure)
+ alreadySeen map[*ast.Object]struct{}
+}
+
+// Visit visits AST nodes and checks if id nodes (ast.Ident) shadow an import name
+func (w importShadowing) Visit(n ast.Node) ast.Visitor {
+ switch n := n.(type) {
+ case *ast.AssignStmt:
+ if n.Tok == token.DEFINE {
+ return w // analyze variable declarations of the form id := expr
+ }
+
+ return nil // skip assigns of the form id = expr (not an id declaration)
+ case *ast.CallExpr, // skip call expressions (not an id declaration)
+ *ast.ImportSpec, // skip import section subtree because we already have the list of imports
+		*ast.KeyValueExpr, // skip analysis of key-value expressions ({key: value}): their ids, even if equal to an import name, do not shadow the import name
+		*ast.ReturnStmt, // skip analysis of returns, ids in the expression were already analyzed
+		*ast.SelectorExpr, // skip analysis of selector expressions (anId.otherId): if anId shadows an import name it was already detected, and otherId cannot shadow the import name
+		*ast.StructType: // skip analysis of struct types because struct fields cannot shadow an import name
+ return nil
+ case *ast.Ident:
+ id := n.Name
+ if id == "_" {
+ return w // skip _ id
+ }
+
+ _, isImportName := w.importNames[id]
+ _, alreadySeen := w.alreadySeen[n.Obj]
+ if isImportName && !alreadySeen {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: n,
+				Category:   "naming",
+ Failure: fmt.Sprintf("The name '%s' shadows an import name", id),
+ })
+
+ w.alreadySeen[n.Obj] = struct{}{}
+ }
+ }
+
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/imports-blacklist.go b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go
new file mode 100644
index 0000000000..31ef901e55
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/imports-blacklist.go
@@ -0,0 +1,52 @@
+package rule
+
+import (
+ "fmt"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ImportsBlacklistRule forbids importing the packages listed in the rule configuration.
+type ImportsBlacklistRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ if file.IsTest() {
+ return failures // skip, test file
+ }
+
+ blacklist := make(map[string]bool, len(arguments))
+
+ for _, arg := range arguments {
+ argStr, ok := arg.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg))
+ }
+		// we add quotes if not present, because when parsed, the value of the AST node will be quoted
+ if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' {
+ argStr = fmt.Sprintf(`"%s"`, argStr)
+ }
+ blacklist[argStr] = true
+ }
+
+ for _, is := range file.AST.Imports {
+ path := is.Path
+ if path != nil && blacklist[path.Value] {
+ failures = append(failures, lint.Failure{
+ Confidence: 1,
+ Failure: "should not use the following blacklisted import: " + path.Value,
+ Node: is,
+ Category: "imports",
+ })
+ }
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ImportsBlacklistRule) Name() string {
+ return "imports-blacklist"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/increment-decrement.go b/vendor/github.com/mgechev/revive/rule/increment-decrement.go
new file mode 100644
index 0000000000..5d6b176719
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/increment-decrement.go
@@ -0,0 +1,74 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// IncrementDecrementRule suggests x++ and x-- over x += 1 and x -= 1.
+type IncrementDecrementRule struct{}
+
+// Apply applies the rule to given file.
+func (r *IncrementDecrementRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintIncrementDecrement{
+ file: file,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *IncrementDecrementRule) Name() string {
+ return "increment-decrement"
+}
+
+type lintIncrementDecrement struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintIncrementDecrement) Visit(n ast.Node) ast.Visitor {
+ as, ok := n.(*ast.AssignStmt)
+ if !ok {
+ return w
+ }
+ if len(as.Lhs) != 1 {
+ return w
+ }
+ if !isOne(as.Rhs[0]) {
+ return w
+ }
+ var suffix string
+ switch as.Tok {
+ case token.ADD_ASSIGN:
+ suffix = "++"
+ case token.SUB_ASSIGN:
+ suffix = "--"
+ default:
+ return w
+ }
+ w.onFailure(lint.Failure{
+ Confidence: 0.8,
+ Node: as,
+ Category: "unary-op",
+ Failure: fmt.Sprintf("should replace %s with %s%s", w.file.Render(as), w.file.Render(as.Lhs[0]), suffix),
+ })
+ return w
+}
+
+func isOne(expr ast.Expr) bool {
+ lit, ok := expr.(*ast.BasicLit)
+ return ok && lit.Kind == token.INT && lit.Value == "1"
+}
diff --git a/vendor/github.com/mgechev/revive/rule/indent-error-flow.go b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
new file mode 100644
index 0000000000..4c9799b2a2
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/indent-error-flow.go
@@ -0,0 +1,78 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// IndentErrorFlowRule lints given else constructs.
+type IndentErrorFlowRule struct{}
+
+// Apply applies the rule to given file.
+func (r *IndentErrorFlowRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintElse{make(map[*ast.IfStmt]bool), onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *IndentErrorFlowRule) Name() string {
+ return "indent-error-flow"
+}
+
+type lintElse struct {
+ ignore map[*ast.IfStmt]bool
+ onFailure func(lint.Failure)
+}
+
+func (w lintElse) Visit(node ast.Node) ast.Visitor {
+ ifStmt, ok := node.(*ast.IfStmt)
+ if !ok || ifStmt.Else == nil {
+ return w
+ }
+ if w.ignore[ifStmt] {
+ if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
+ w.ignore[elseif] = true
+ }
+ return w
+ }
+ if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
+ w.ignore[elseif] = true
+ return w
+ }
+ if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
+ // only care about elses without conditions
+ return w
+ }
+ if len(ifStmt.Body.List) == 0 {
+ return w
+ }
+ shortDecl := false // does the if statement have a ":=" initialization statement?
+ if ifStmt.Init != nil {
+ if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
+ shortDecl = true
+ }
+ }
+ lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
+ if _, ok := lastStmt.(*ast.ReturnStmt); ok {
+ extra := ""
+ if shortDecl {
+ extra = " (move short variable declaration to its own line if necessary)"
+ }
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: ifStmt.Else,
+ Category: "indent",
+ Failure: "if block ends with a return statement, so drop this else and outdent its block" + extra,
+ })
+ }
+ return w
+}
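A hypothetical example of what this rule flags (identifiers invented; assumes the standard library os package):

func readConfig(path string) ([]byte, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	} else { // flagged: the if block ends with a return, so drop this else and outdent its block
		return data, nil
	}
}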
diff --git a/vendor/github.com/mgechev/revive/rule/line-length-limit.go b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
new file mode 100644
index 0000000000..5ee057079f
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/line-length-limit.go
@@ -0,0 +1,84 @@
+package rule
+
+import (
+ "bufio"
+ "bytes"
+ "fmt"
+ "go/token"
+ "strings"
+ "unicode/utf8"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// LineLengthLimitRule lints the length of source-code lines against a configured maximum.
+type LineLengthLimitRule struct{}
+
+// Apply applies the rule to given file.
+func (r *LineLengthLimitRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ if len(arguments) != 1 {
+ panic(`invalid configuration for "line-length-limit"`)
+ }
+
+ max, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok || max < 0 {
+ panic(`invalid value passed as argument number to the "line-length-limit" rule`)
+ }
+
+ var failures []lint.Failure
+ checker := lintLineLengthNum{
+ max: int(max),
+ file: file,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ checker.check()
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *LineLengthLimitRule) Name() string {
+ return "line-length-limit"
+}
+
+type lintLineLengthNum struct {
+ max int
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (r lintLineLengthNum) check() {
+ f := bytes.NewReader(r.file.Content())
+ spaces := strings.Repeat(" ", 4) // tab width = 4
+ l := 1
+ s := bufio.NewScanner(f)
+ for s.Scan() {
+ t := s.Text()
+ t = strings.Replace(t, "\t", spaces, -1)
+ c := utf8.RuneCountInString(t)
+ if c > r.max {
+ r.onFailure(lint.Failure{
+ Category: "code-style",
+ Position: lint.FailurePosition{
+ // Offset not set; it is non-trivial, and doesn't appear to be needed.
+ Start: token.Position{
+ Filename: r.file.Name,
+ Line: l,
+ Column: 0,
+ },
+ End: token.Position{
+ Filename: r.file.Name,
+ Line: l,
+ Column: c,
+ },
+ },
+ Confidence: 1,
+ Failure: fmt.Sprintf("line is %d characters, out of limit %d", c, r.max),
+ })
+ }
+ l++
+ }
+}
diff --git a/vendor/github.com/mgechev/revive/rule/max-public-structs.go b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
new file mode 100644
index 0000000000..9a2d07cbc1
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/max-public-structs.go
@@ -0,0 +1,67 @@
+package rule
+
+import (
+ "go/ast"
+
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// MaxPublicStructsRule lints files that declare more public structs than the configured maximum.
+type MaxPublicStructsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *MaxPublicStructsRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := &lintMaxPublicStructs{
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, fileAst)
+
+ max, ok := arguments[0].(int64) // Alt. non panicking version
+ if !ok {
+ panic(`invalid value passed as argument number to the "max-public-structs" rule`)
+ }
+
+ if walker.current > max {
+ walker.onFailure(lint.Failure{
+ Failure: "you have exceeded the maximum number of public struct declarations",
+ Confidence: 1,
+ Node: fileAst,
+ Category: "style",
+ })
+ }
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *MaxPublicStructsRule) Name() string {
+ return "max-public-structs"
+}
+
+type lintMaxPublicStructs struct {
+ current int64
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintMaxPublicStructs) Visit(n ast.Node) ast.Visitor {
+ switch v := n.(type) {
+ case *ast.TypeSpec:
+ name := v.Name.Name
+ first := string(name[0])
+ if strings.ToUpper(first) == first {
+ w.current++
+ }
+ break
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/modifies-param.go b/vendor/github.com/mgechev/revive/rule/modifies-param.go
new file mode 100644
index 0000000000..55136e6c82
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/modifies-param.go
@@ -0,0 +1,80 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ModifiesParamRule warns when a function parameter is modified inside the function body.
+type ModifiesParamRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ModifiesParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintModifiesParamRule{onFailure: onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ModifiesParamRule) Name() string {
+ return "modifies-parameter"
+}
+
+type lintModifiesParamRule struct {
+ params map[string]bool
+ onFailure func(lint.Failure)
+}
+
+func retrieveParamNames(pl []*ast.Field) map[string]bool {
+ result := make(map[string]bool, len(pl))
+ for _, p := range pl {
+ for _, n := range p.Names {
+ if n.Name == "_" {
+ continue
+ }
+
+ result[n.Name] = true
+ }
+ }
+ return result
+}
+
+func (w lintModifiesParamRule) Visit(node ast.Node) ast.Visitor {
+ switch v := node.(type) {
+ case *ast.FuncDecl:
+ w.params = retrieveParamNames(v.Type.Params.List)
+ case *ast.IncDecStmt:
+ if id, ok := v.X.(*ast.Ident); ok {
+ checkParam(id, &w)
+ }
+ case *ast.AssignStmt:
+ lhs := v.Lhs
+ for _, e := range lhs {
+ id, ok := e.(*ast.Ident)
+ if ok {
+ checkParam(id, &w)
+ }
+ }
+ }
+
+ return w
+}
+
+func checkParam(id *ast.Ident, w *lintModifiesParamRule) {
+ if w.params[id.Name] {
+ w.onFailure(lint.Failure{
+ Confidence: 0.5, // confidence is low because of shadow variables
+ Node: id,
+ Category: "bad practice",
+ Failure: fmt.Sprintf("parameter '%s' seems to be modified", id),
+ })
+ }
+}
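An invented fragment illustrating the low-confidence warning this rule emits:

func normalize(limit int) int {
	if limit < 0 {
		limit = 0 // flagged: parameter 'limit' seems to be modified
	}
	return limit
}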
diff --git a/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
new file mode 100644
index 0000000000..4fe22ddf3f
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/modifies-value-receiver.go
@@ -0,0 +1,134 @@
+package rule
+
+import (
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ModifiesValRecRule lints assignments to value method-receivers.
+type ModifiesValRecRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ModifiesValRecRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintModifiesValRecRule{file: file, onFailure: onFailure}
+ file.Pkg.TypeCheck()
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ModifiesValRecRule) Name() string {
+ return "modifies-value-receiver"
+}
+
+type lintModifiesValRecRule struct {
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintModifiesValRecRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ if n.Recv == nil {
+ return nil // skip, not a method
+ }
+
+ receiver := n.Recv.List[0]
+ if _, ok := receiver.Type.(*ast.StarExpr); ok {
+ return nil // skip, method with pointer receiver
+ }
+
+ if w.skipType(receiver.Type) {
+ return nil // skip, receiver is a map or array
+ }
+
+ if len(receiver.Names) < 1 {
+ return nil // skip, anonymous receiver
+ }
+
+ receiverName := receiver.Names[0].Name
+ if receiverName == "_" {
+ return nil // skip, anonymous receiver
+ }
+
+ fselect := func(n ast.Node) bool {
+ // look for assignments with the receiver in the right hand
+ asgmt, ok := n.(*ast.AssignStmt)
+ if !ok {
+ return false
+ }
+
+ for _, exp := range asgmt.Lhs {
+ switch e := exp.(type) {
+ case *ast.IndexExpr: // receiver...[] = ...
+ continue
+ case *ast.StarExpr: // *receiver = ...
+ continue
+ case *ast.SelectorExpr: // receiver.field = ...
+ name := w.getNameFromExpr(e.X)
+ if name == "" || name != receiverName {
+ continue
+ }
+
+ if w.skipType(ast.Expr(e.Sel)) {
+ continue
+ }
+
+ case *ast.Ident: // receiver := ...
+ if e.Name != receiverName {
+ continue
+ }
+ default:
+ continue
+ }
+
+ return true
+ }
+
+ return false
+ }
+
+ assignmentsToReceiver := pick(n.Body, fselect, nil)
+
+ for _, assignment := range assignmentsToReceiver {
+ w.onFailure(lint.Failure{
+ Node: assignment,
+ Confidence: 1,
+ Failure: "suspicious assignment to a by-value method receiver",
+ })
+ }
+ }
+
+ return w
+}
+
+func (w lintModifiesValRecRule) skipType(t ast.Expr) bool {
+ rt := w.file.Pkg.TypeOf(t)
+ if rt == nil {
+ return false
+ }
+
+ rt = rt.Underlying()
+ rtName := rt.String()
+
+ // skip when receiver is a map or array
+ return strings.HasPrefix(rtName, "[]") || strings.HasPrefix(rtName, "map[")
+}
+
+func (lintModifiesValRecRule) getNameFromExpr(ie ast.Expr) string {
+ ident, ok := ie.(*ast.Ident)
+ if !ok {
+ return ""
+ }
+
+ return ident.Name
+}
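A sketch (made-up type and method) of the assignment pattern this rule reports:

type counter struct{ n int }

func (c counter) Set(v int) {
	c.n = v // flagged: suspicious assignment to a by-value method receiver; the update is lost when Set returns
}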
diff --git a/vendor/github.com/mgechev/revive/rule/package-comments.go b/vendor/github.com/mgechev/revive/rule/package-comments.go
new file mode 100644
index 0000000000..00fc5bb915
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/package-comments.go
@@ -0,0 +1,121 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// PackageCommentsRule lints the package comments. It complains if
+// there is no package comment, or if it is not of the right form.
+// This has a notable false positive in that a package comment
+// could rightfully appear in a different file of the same package,
+// but that's not easy to fix since this linter is file-oriented.
+type PackageCommentsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *PackageCommentsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ if isTest(file) {
+ return failures
+ }
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ fileAst := file.AST
+ w := &lintPackageComments{fileAst, file, onFailure}
+ ast.Walk(w, fileAst)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *PackageCommentsRule) Name() string {
+ return "package-comments"
+}
+
+type lintPackageComments struct {
+ fileAst *ast.File
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (l *lintPackageComments) Visit(_ ast.Node) ast.Visitor {
+ if l.file.IsTest() {
+ return nil
+ }
+
+ const ref = styleGuideBase + "#package-comments"
+ prefix := "Package " + l.fileAst.Name.Name + " "
+
+ // Look for a detached package comment.
+ // First, scan for the last comment that occurs before the "package" keyword.
+ var lastCG *ast.CommentGroup
+ for _, cg := range l.fileAst.Comments {
+ if cg.Pos() > l.fileAst.Package {
+ // Gone past "package" keyword.
+ break
+ }
+ lastCG = cg
+ }
+ if lastCG != nil && strings.HasPrefix(lastCG.Text(), prefix) {
+ endPos := l.file.ToPosition(lastCG.End())
+ pkgPos := l.file.ToPosition(l.fileAst.Package)
+ if endPos.Line+1 < pkgPos.Line {
+ // There isn't a great place to anchor this error;
+ // the start of the blank lines between the doc and the package statement
+ // is at least pointing at the location of the problem.
+ pos := token.Position{
+ Filename: endPos.Filename,
+ // Offset not set; it is non-trivial, and doesn't appear to be needed.
+ Line: endPos.Line + 1,
+ Column: 1,
+ }
+ l.onFailure(lint.Failure{
+ Category: "comments",
+ Position: lint.FailurePosition{
+ Start: pos,
+ End: pos,
+ },
+ Confidence: 0.9,
+ Failure: "package comment is detached; there should be no blank lines between it and the package statement",
+ })
+ return nil
+ }
+ }
+
+ if l.fileAst.Doc == nil {
+ l.onFailure(lint.Failure{
+ Category: "comments",
+ Node: l.fileAst,
+ Confidence: 0.2,
+ Failure: "should have a package comment, unless it's in another file for this package",
+ })
+ return nil
+ }
+ s := l.fileAst.Doc.Text()
+ if ts := strings.TrimLeft(s, " \t"); ts != s {
+ l.onFailure(lint.Failure{
+ Category: "comments",
+ Node: l.fileAst.Doc,
+ Confidence: 1,
+ Failure: "package comment should not have leading space",
+ })
+ s = ts
+ }
+ // Only non-main packages need to keep to this form.
+ if !l.file.Pkg.IsMain() && !strings.HasPrefix(s, prefix) {
+ l.onFailure(lint.Failure{
+ Category: "comments",
+ Node: l.fileAst.Doc,
+ Confidence: 1,
+ Failure: fmt.Sprintf(`package comment should be of the form "%s..."`, prefix),
+ })
+ }
+ return nil
+}
diff --git a/vendor/github.com/mgechev/revive/rule/range-val-address.go b/vendor/github.com/mgechev/revive/rule/range-val-address.go
new file mode 100644
index 0000000000..18554825a8
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/range-val-address.go
@@ -0,0 +1,113 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// RangeValAddress lints code that takes the address of a range-loop value variable.
+type RangeValAddress struct{}
+
+// Apply applies the rule to given file.
+func (r *RangeValAddress) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ walker := rangeValAddress{
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *RangeValAddress) Name() string {
+ return "range-val-address"
+}
+
+type rangeValAddress struct {
+ onFailure func(lint.Failure)
+}
+
+func (w rangeValAddress) Visit(node ast.Node) ast.Visitor {
+ n, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return w
+ }
+
+ value, ok := n.Value.(*ast.Ident)
+ if !ok {
+ return w
+ }
+
+ ast.Walk(rangeBodyVisitor{
+ valueID: value.Obj,
+ onFailure: w.onFailure,
+ }, n.Body)
+
+ return w
+}
+
+type rangeBodyVisitor struct {
+ valueID *ast.Object
+ onFailure func(lint.Failure)
+}
+
+func (bw rangeBodyVisitor) Visit(node ast.Node) ast.Visitor {
+ asgmt, ok := node.(*ast.AssignStmt)
+ if !ok {
+ return bw
+ }
+
+ for _, exp := range asgmt.Lhs {
+ e, ok := exp.(*ast.IndexExpr)
+ if !ok {
+ continue
+ }
+ if bw.isAccessingRangeValueAddress(e.Index) { // e.g. a[&value]...
+ bw.onFailure(bw.newFailure(e.Index))
+ }
+ }
+
+ for _, exp := range asgmt.Rhs {
+ switch e := exp.(type) {
+ case *ast.UnaryExpr: // e.g. ...&value
+ if bw.isAccessingRangeValueAddress(e) {
+ bw.onFailure(bw.newFailure(e))
+ }
+ case *ast.CallExpr:
+ if fun, ok := e.Fun.(*ast.Ident); ok && fun.Name == "append" { // e.g. ...append(arr, &value)
+ for _, v := range e.Args {
+ if bw.isAccessingRangeValueAddress(v) {
+ bw.onFailure(bw.newFailure(e))
+ }
+ }
+ }
+ }
+ }
+ return bw
+}
+
+func (bw rangeBodyVisitor) isAccessingRangeValueAddress(exp ast.Expr) bool {
+ u, ok := exp.(*ast.UnaryExpr)
+ if !ok {
+ return false
+ }
+
+ v, ok := u.X.(*ast.Ident)
+ return ok && u.Op == token.AND && v.Obj == bw.valueID
+}
+
+func (bw rangeBodyVisitor) newFailure(node ast.Node) lint.Failure {
+ return lint.Failure{
+ Node: node,
+ Confidence: 1,
+ Failure: fmt.Sprintf("suspicious assignment of '%s'. range-loop variables always have the same address", bw.valueID.Name),
+ }
+}
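Illustrative only, with invented names: taking the address of the range value inside an append call is what this rule looks for.

func pointers(names []string) []*string {
	var out []*string
	for _, n := range names {
		out = append(out, &n) // flagged: range-loop variables always have the same address
	}
	return out
}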
diff --git a/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go
new file mode 100644
index 0000000000..857787be38
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/range-val-in-closure.go
@@ -0,0 +1,111 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// RangeValInClosureRule warns when a loop variable is captured by a func literal in a go or defer statement.
+type RangeValInClosureRule struct{}
+
+// Apply applies the rule to given file.
+func (r *RangeValInClosureRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ walker := rangeValInClosure{
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *RangeValInClosureRule) Name() string {
+ return "range-val-in-closure"
+}
+
+type rangeValInClosure struct {
+ onFailure func(lint.Failure)
+}
+
+func (w rangeValInClosure) Visit(node ast.Node) ast.Visitor {
+
+ // Find the variables updated by the loop statement.
+ var vars []*ast.Ident
+ addVar := func(expr ast.Expr) {
+ if id, ok := expr.(*ast.Ident); ok {
+ vars = append(vars, id)
+ }
+ }
+ var body *ast.BlockStmt
+ switch n := node.(type) {
+ case *ast.RangeStmt:
+ body = n.Body
+ addVar(n.Key)
+ addVar(n.Value)
+ case *ast.ForStmt:
+ body = n.Body
+ switch post := n.Post.(type) {
+ case *ast.AssignStmt:
+ // e.g. for p = head; p != nil; p = p.next
+ for _, lhs := range post.Lhs {
+ addVar(lhs)
+ }
+ case *ast.IncDecStmt:
+ // e.g. for i := 0; i < n; i++
+ addVar(post.X)
+ }
+ }
+ if vars == nil {
+ return w
+ }
+
+ // Inspect a go or defer statement
+ // if it's the last one in the loop body.
+ // (We give up if there are following statements,
+ // because it's hard to prove go isn't followed by wait,
+ // or defer by return.)
+ if len(body.List) == 0 {
+ return w
+ }
+ var last *ast.CallExpr
+ switch s := body.List[len(body.List)-1].(type) {
+ case *ast.GoStmt:
+ last = s.Call
+ case *ast.DeferStmt:
+ last = s.Call
+ default:
+ return w
+ }
+ lit, ok := last.Fun.(*ast.FuncLit)
+ if !ok {
+ return w
+ }
+ if lit.Type == nil {
+ // Not referring to a variable (e.g. struct field name)
+ return w
+ }
+ ast.Inspect(lit.Body, func(n ast.Node) bool {
+ id, ok := n.(*ast.Ident)
+ if !ok || id.Obj == nil {
+ return true
+ }
+ for _, v := range vars {
+ if v.Obj == id.Obj {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Failure: fmt.Sprintf("loop variable %v captured by func literal", id.Name),
+ Node: n,
+ })
+ }
+ }
+ return true
+ })
+ return w
+}
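A made-up fragment (assumes the fmt package) showing the capture this rule warns about; the go statement is the last statement of the loop body, which is the only case the walker inspects.

func printAll(items []string) {
	for i, v := range items {
		go func() {
			fmt.Println(i, v) // flagged: loop variables i and v captured by func literal
		}()
	}
}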
diff --git a/vendor/github.com/mgechev/revive/rule/range.go b/vendor/github.com/mgechev/revive/rule/range.go
new file mode 100644
index 0000000000..d18492c71a
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/range.go
@@ -0,0 +1,82 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// RangeRule lints range statements whose second value is the blank identifier and can be omitted.
+type RangeRule struct{}
+
+// Apply applies the rule to given file.
+func (r *RangeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := &lintRanges{file, onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *RangeRule) Name() string {
+ return "range"
+}
+
+type lintRanges struct {
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintRanges) Visit(node ast.Node) ast.Visitor {
+ rs, ok := node.(*ast.RangeStmt)
+ if !ok {
+ return w
+ }
+ if rs.Value == nil {
+ // for x = range m { ... }
+ return w // single var form
+ }
+ if !isIdent(rs.Value, "_") {
+ // for ?, y = range m { ... }
+ return w
+ }
+
+ newRS := *rs // shallow copy
+ newRS.Value = nil
+
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("should omit 2nd value from range; this loop is equivalent to `for %s %s range ...`", w.file.Render(rs.Key), rs.Tok),
+ Confidence: 1,
+ Node: rs.Value,
+ ReplacementLine: firstLineOf(w.file, &newRS, rs),
+ })
+
+ return w
+}
+
+func firstLineOf(f *lint.File, node, match ast.Node) string {
+ line := f.Render(node)
+ if i := strings.Index(line, "\n"); i >= 0 {
+ line = line[:i]
+ }
+ return indentOf(f, match) + line
+}
+
+func indentOf(f *lint.File, node ast.Node) string {
+ line := srcLine(f.Content(), f.ToPosition(node.Pos()))
+ for i, r := range line {
+ switch r {
+ case ' ', '\t':
+ default:
+ return line[:i]
+ }
+ }
+ return line // unusual or empty line
+}
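A small invented example of the redundant second range value this rule reports, together with the simplification it suggests:

func keys(m map[string]int) []string {
	var ks []string
	for k, _ := range m { // flagged: should omit 2nd value from range; equivalent to `for k := range m`
		ks = append(ks, k)
	}
	return ks
}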
diff --git a/vendor/github.com/mgechev/revive/rule/receiver-naming.go b/vendor/github.com/mgechev/revive/rule/receiver-naming.go
new file mode 100644
index 0000000000..589d5f0ef3
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/receiver-naming.go
@@ -0,0 +1,81 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// ReceiverNamingRule lints the naming of method receivers.
+type ReceiverNamingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *ReceiverNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintReceiverName{
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ typeReceiver: map[string]string{},
+ }
+
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *ReceiverNamingRule) Name() string {
+ return "receiver-naming"
+}
+
+type lintReceiverName struct {
+ onFailure func(lint.Failure)
+ typeReceiver map[string]string
+}
+
+func (w lintReceiverName) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok || fn.Recv == nil || len(fn.Recv.List) == 0 {
+ return w
+ }
+ names := fn.Recv.List[0].Names
+ if len(names) < 1 {
+ return w
+ }
+ name := names[0].Name
+ const ref = styleGuideBase + "#receiver-names"
+ if name == "_" {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Confidence: 1,
+ Category: "naming",
+ Failure: "receiver name should not be an underscore, omit the name if it is unused",
+ })
+ return w
+ }
+ if name == "this" || name == "self" {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Confidence: 1,
+ Category: "naming",
+ Failure: `receiver name should be a reflection of its identity; don't use generic names such as "this" or "self"`,
+ })
+ return w
+ }
+ recv := receiverType(fn)
+ if prev, ok := w.typeReceiver[recv]; ok && prev != name {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Confidence: 1,
+ Category: "naming",
+ Failure: fmt.Sprintf("receiver name %s should be consistent with previous receiver name %s for %s", name, prev, recv),
+ })
+ return w
+ }
+ w.typeReceiver[recv] = name
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
new file mode 100644
index 0000000000..947b8aac7c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/redefines-builtin-id.go
@@ -0,0 +1,145 @@
+package rule
+
+import (
+ "fmt"
+ "github.com/mgechev/revive/lint"
+ "go/ast"
+ "go/token"
+)
+
+// RedefinesBuiltinIDRule warns when a builtin identifier is shadowed.
+type RedefinesBuiltinIDRule struct{}
+
+// Apply applies the rule to given file.
+func (r *RedefinesBuiltinIDRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ var builtInConstAndVars = map[string]bool{
+ "true": true,
+ "false": true,
+ "iota": true,
+ "nil": true,
+ }
+
+ var builtFunctions = map[string]bool{
+ "append": true,
+ "cap": true,
+ "close": true,
+ "complex": true,
+ "copy": true,
+ "delete": true,
+ "imag": true,
+ "len": true,
+ "make": true,
+ "new": true,
+ "panic": true,
+ "print": true,
+ "println": true,
+ "real": true,
+ "recover": true,
+ }
+
+ var builtInTypes = map[string]bool{
+ "ComplexType": true,
+ "FloatType": true,
+ "IntegerType": true,
+ "Type": true,
+ "Type1": true,
+ "bool": true,
+ "byte": true,
+ "complex128": true,
+ "complex64": true,
+ "error": true,
+ "float32": true,
+ "float64": true,
+ "int": true,
+ "int16": true,
+ "int32": true,
+ "int64": true,
+ "int8": true,
+ "rune": true,
+ "string": true,
+ "uint": true,
+ "uint16": true,
+ "uint32": true,
+ "uint64": true,
+ "uint8": true,
+ "uintptr": true,
+ }
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ astFile := file.AST
+ w := &lintRedefinesBuiltinID{builtInConstAndVars, builtFunctions, builtInTypes, onFailure}
+ ast.Walk(w, astFile)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *RedefinesBuiltinIDRule) Name() string {
+ return "redefines-builtin-id"
+}
+
+type lintRedefinesBuiltinID struct {
+ constsAndVars map[string]bool
+ funcs map[string]bool
+ types map[string]bool
+ onFailure func(lint.Failure)
+}
+
+func (w *lintRedefinesBuiltinID) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.GenDecl:
+ if n.Tok != token.TYPE {
+ return nil // skip if not type declaration
+ }
+ typeSpec, ok := n.Specs[0].(*ast.TypeSpec)
+ if !ok {
+ return nil
+ }
+ id := typeSpec.Name.Name
+ if w.types[id] {
+ w.addFailure(n, fmt.Sprintf("redefinition of the built-in type %s", id))
+ }
+ case *ast.FuncDecl:
+ if n.Recv != nil {
+ return w // skip methods
+ }
+
+ id := n.Name.Name
+ if w.funcs[id] {
+ w.addFailure(n, fmt.Sprintf("redefinition of the built-in function %s", id))
+ }
+ case *ast.AssignStmt:
+ for _, e := range n.Lhs {
+ id, ok := e.(*ast.Ident)
+ if !ok {
+ continue
+ }
+
+ if w.constsAndVars[id.Name] {
+ var msg string
+ if n.Tok == token.DEFINE {
+ msg = fmt.Sprintf("assignment creates a shadow of built-in identifier %s", id.Name)
+ } else {
+ msg = fmt.Sprintf("assignment modifies built-in identifier %s", id.Name)
+ }
+ w.addFailure(n, msg)
+ }
+ }
+ }
+
+ return w
+}
+
+func (w lintRedefinesBuiltinID) addFailure(node ast.Node, msg string) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "logic",
+ Failure: msg,
+ })
+}
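Both declarations in this invented snippet compile, but they shadow predeclared identifiers and are therefore reported by the rule:

type error string // flagged: redefinition of the built-in type error

func len(s string) int { // flagged: redefinition of the built-in function len
	n := 0
	for range s {
		n++
	}
	return n
}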
diff --git a/vendor/github.com/mgechev/revive/rule/string-of-int.go b/vendor/github.com/mgechev/revive/rule/string-of-int.go
new file mode 100644
index 0000000000..38f453a4aa
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/string-of-int.go
@@ -0,0 +1,95 @@
+package rule
+
+import (
+ "go/ast"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// StringOfIntRule warns when an integer is converted to a string with a plain type conversion instead of strconv.Itoa.
+type StringOfIntRule struct{}
+
+// Apply applies the rule to given file.
+func (r *StringOfIntRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ astFile := file.AST
+ file.Pkg.TypeCheck()
+
+ w := &lintStringInt{file, onFailure}
+ ast.Walk(w, astFile)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *StringOfIntRule) Name() string {
+ return "string-of-int"
+}
+
+type lintStringInt struct {
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintStringInt) Visit(node ast.Node) ast.Visitor {
+ ce, ok := node.(*ast.CallExpr)
+ if !ok {
+ return w
+ }
+
+ if !w.isCallStringCast(ce.Fun) {
+ return w
+ }
+
+ if !w.isIntExpression(ce.Args) {
+ return w
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: ce,
+ Failure: "dubious conversion of an integer into a string, use strconv.Itoa",
+ })
+
+ return w
+}
+
+func (w *lintStringInt) isCallStringCast(e ast.Expr) bool {
+ t := w.file.Pkg.TypeOf(e)
+ if t == nil {
+ return false
+ }
+
+ tb, _ := t.Underlying().(*types.Basic)
+
+ return tb != nil && tb.Kind() == types.String
+}
+
+func (w *lintStringInt) isIntExpression(es []ast.Expr) bool {
+ if len(es) != 1 {
+ return false
+ }
+
+ t := w.file.Pkg.TypeOf(es[0])
+ if t == nil {
+ return false
+ }
+
+ ut, _ := t.Underlying().(*types.Basic)
+ if ut == nil || ut.Info()&types.IsInteger == 0 {
+ return false
+ }
+
+ switch ut.Kind() {
+ case types.Byte, types.Rune, types.UntypedRune:
+ return false
+ }
+
+ return true
+}
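A hypothetical pair of functions (the second one requires importing "strconv") contrasting the conversion this rule flags with the fix it suggests:

func label(code int) string {
	return string(code) // flagged: dubious conversion of an integer into a string, use strconv.Itoa
}

func labelFixed(code int) string {
	return strconv.Itoa(code) // not flagged
}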
diff --git a/vendor/github.com/mgechev/revive/rule/struct-tag.go b/vendor/github.com/mgechev/revive/rule/struct-tag.go
new file mode 100644
index 0000000000..57cf8103a6
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/struct-tag.go
@@ -0,0 +1,236 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "strconv"
+ "strings"
+
+ "github.com/fatih/structtag"
+ "github.com/mgechev/revive/lint"
+)
+
+// StructTagRule lints struct tags.
+type StructTagRule struct{}
+
+// Apply applies the rule to given file.
+func (r *StructTagRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintStructTagRule{onFailure: onFailure}
+
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *StructTagRule) Name() string {
+ return "struct-tag"
+}
+
+type lintStructTagRule struct {
+ onFailure func(lint.Failure)
+ usedTagNbr map[string]bool // list of used tag numbers
+}
+
+func (w lintStructTagRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.StructType:
+ if n.Fields == nil || n.Fields.NumFields() < 1 {
+ return nil // skip empty structs
+ }
+ w.usedTagNbr = map[string]bool{} // init
+ for _, f := range n.Fields.List {
+ if f.Tag != nil {
+ w.checkTaggedField(f)
+ }
+ }
+ }
+
+ return w
+
+}
+
+// checkTaggedField checks the tag of the given field.
+// precondition: the field has a tag
+func (w lintStructTagRule) checkTaggedField(f *ast.Field) {
+ if len(f.Names) > 0 && !f.Names[0].IsExported() {
+ w.addFailure(f, "tag on not-exported field "+f.Names[0].Name)
+ }
+
+ tags, err := structtag.Parse(strings.Trim(f.Tag.Value, "`"))
+ if err != nil || tags == nil {
+ w.addFailure(f.Tag, "malformed tag")
+ return
+ }
+
+ for _, tag := range tags.Tags() {
+ switch key := tag.Key; key {
+ case "asn1":
+ msg, ok := w.checkASN1Tag(f.Type, tag)
+ if !ok {
+ w.addFailure(f.Tag, msg)
+ }
+ case "bson":
+ msg, ok := w.checkBSONTag(tag.Options)
+ if !ok {
+ w.addFailure(f.Tag, msg)
+ }
+ case "default":
+ if !w.typeValueMatch(f.Type, tag.Name) {
+ w.addFailure(f.Tag, "field's type and default value's type mismatch")
+ }
+ case "json":
+ msg, ok := w.checkJSONTag(tag.Name, tag.Options)
+ if !ok {
+ w.addFailure(f.Tag, msg)
+ }
+ case "protobuf":
+ // Not implemented yet
+ case "required":
+ if tag.Name != "true" && tag.Name != "false" {
+ w.addFailure(f.Tag, "required should be 'true' or 'false'")
+ }
+ case "xml":
+ msg, ok := w.checkXMLTag(tag.Options)
+ if !ok {
+ w.addFailure(f.Tag, msg)
+ }
+ case "yaml":
+ msg, ok := w.checkYAMLTag(tag.Options)
+ if !ok {
+ w.addFailure(f.Tag, msg)
+ }
+ default:
+ // unknown key
+ }
+ }
+}
+
+func (w lintStructTagRule) checkASN1Tag(t ast.Expr, tag *structtag.Tag) (string, bool) {
+ checkList := append(tag.Options, tag.Name)
+ for _, opt := range checkList {
+ switch opt {
+ case "application", "explicit", "generalized", "ia5", "omitempty", "optional", "set", "utf8":
+
+ default:
+ if strings.HasPrefix(opt, "tag:") {
+ parts := strings.Split(opt, ":")
+ tagNumber := parts[1]
+ if w.usedTagNbr[tagNumber] {
+ return fmt.Sprintf("duplicated tag number %s", tagNumber), false
+ }
+ w.usedTagNbr[tagNumber] = true
+
+ continue
+ }
+
+ if strings.HasPrefix(opt, "default:") {
+ parts := strings.Split(opt, ":")
+ if len(parts) < 2 {
+ return "malformed default for ASN1 tag", false
+ }
+ if !w.typeValueMatch(t, parts[1]) {
+ return "field's type and default value's type mismatch", false
+ }
+
+ continue
+ }
+
+ return fmt.Sprintf("unknown option '%s' in ASN1 tag", opt), false
+ }
+ }
+
+ return "", true
+}
+
+func (w lintStructTagRule) checkBSONTag(options []string) (string, bool) {
+ for _, opt := range options {
+ switch opt {
+ case "inline", "minsize", "omitempty":
+ default:
+ return fmt.Sprintf("unknown option '%s' in BSON tag", opt), false
+ }
+ }
+
+ return "", true
+}
+
+func (w lintStructTagRule) checkJSONTag(name string, options []string) (string, bool) {
+ for _, opt := range options {
+ switch opt {
+ case "omitempty", "string":
+ case "":
+ // special case for JSON key "-"
+ if name != "-" {
+ return "option can not be empty in JSON tag", false
+ }
+ default:
+ return fmt.Sprintf("unknown option '%s' in JSON tag", opt), false
+ }
+ }
+
+ return "", true
+}
+
+func (w lintStructTagRule) checkXMLTag(options []string) (string, bool) {
+ for _, opt := range options {
+ switch opt {
+ case "any", "attr", "cdata", "chardata", "comment", "innerxml", "omitempty", "typeattr":
+ default:
+ return fmt.Sprintf("unknown option '%s' in XML tag", opt), false
+ }
+ }
+
+ return "", true
+}
+
+func (w lintStructTagRule) checkYAMLTag(options []string) (string, bool) {
+ for _, opt := range options {
+ switch opt {
+ case "flow", "inline", "omitempty":
+ default:
+ return fmt.Sprintf("unknown option '%s' in YAML tag", opt), false
+ }
+ }
+
+ return "", true
+}
+
+func (w lintStructTagRule) typeValueMatch(t ast.Expr, val string) bool {
+ tID, ok := t.(*ast.Ident)
+ if !ok {
+ return true
+ }
+
+ typeMatches := true
+ switch tID.Name {
+ case "bool":
+ typeMatches = val == "true" || val == "false"
+ case "float64":
+ _, err := strconv.ParseFloat(val, 64)
+ typeMatches = err == nil
+ case "int":
+ _, err := strconv.ParseInt(val, 10, 64)
+ typeMatches = err == nil
+ case "string":
+ case "nil":
+ default:
+ // unchecked type
+ }
+
+ return typeMatches
+}
+
+func (w lintStructTagRule) addFailure(n ast.Node, msg string) {
+ w.onFailure(lint.Failure{
+ Node: n,
+ Failure: msg,
+ Confidence: 1,
+ })
+}
diff --git a/vendor/github.com/mgechev/revive/rule/superfluous-else.go b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
new file mode 100644
index 0000000000..c29be9e0d1
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/superfluous-else.go
@@ -0,0 +1,114 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// SuperfluousElseRule lints else blocks that are superfluous because the if block ends with a branching statement or a call to a terminating function such as os.Exit or log.Fatal.
+type SuperfluousElseRule struct{}
+
+// Apply applies the rule to given file.
+func (r *SuperfluousElseRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ var branchingFunctions = map[string]map[string]bool{
+ "os": map[string]bool{"Exit": true},
+ "log": map[string]bool{
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ },
+ }
+
+ w := lintSuperfluousElse{make(map[*ast.IfStmt]bool), onFailure, branchingFunctions}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *SuperfluousElseRule) Name() string {
+ return "superfluous-else"
+}
+
+type lintSuperfluousElse struct {
+ ignore map[*ast.IfStmt]bool
+ onFailure func(lint.Failure)
+ branchingFunctions map[string]map[string]bool
+}
+
+func (w lintSuperfluousElse) Visit(node ast.Node) ast.Visitor {
+ ifStmt, ok := node.(*ast.IfStmt)
+ if !ok || ifStmt.Else == nil {
+ return w
+ }
+ if w.ignore[ifStmt] {
+ if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
+ w.ignore[elseif] = true
+ }
+ return w
+ }
+ if elseif, ok := ifStmt.Else.(*ast.IfStmt); ok {
+ w.ignore[elseif] = true
+ return w
+ }
+ if _, ok := ifStmt.Else.(*ast.BlockStmt); !ok {
+ // only care about elses without conditions
+ return w
+ }
+ if len(ifStmt.Body.List) == 0 {
+ return w
+ }
+ shortDecl := false // does the if statement have a ":=" initialization statement?
+ if ifStmt.Init != nil {
+ if as, ok := ifStmt.Init.(*ast.AssignStmt); ok && as.Tok == token.DEFINE {
+ shortDecl = true
+ }
+ }
+ extra := ""
+ if shortDecl {
+ extra = " (move short variable declaration to its own line if necessary)"
+ }
+
+ lastStmt := ifStmt.Body.List[len(ifStmt.Body.List)-1]
+ switch stmt := lastStmt.(type) {
+ case *ast.BranchStmt:
+ token := stmt.Tok.String()
+ if token != "fallthrough" {
+ w.onFailure(newFailure(ifStmt.Else, "if block ends with a "+token+" statement, so drop this else and outdent its block"+extra))
+ }
+ case *ast.ExprStmt:
+ if ce, ok := stmt.X.(*ast.CallExpr); ok { // it's a function call
+ if fc, ok := ce.Fun.(*ast.SelectorExpr); ok {
+ if id, ok := fc.X.(*ast.Ident); ok {
+ fn := fc.Sel.Name
+ pkg := id.Name
+ if w.branchingFunctions[pkg][fn] { // it's a call to a branching function
+ w.onFailure(
+ newFailure(ifStmt.Else, fmt.Sprintf("if block ends with call to %s.%s function, so drop this else and outdent its block%s", pkg, fn, extra)))
+ }
+ }
+ }
+ }
+ }
+
+ return w
+}
+
+func newFailure(node ast.Node, msg string) lint.Failure {
+ return lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "indent",
+ Failure: msg,
+ }
+}
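An invented loop showing the superfluous else this rule detects when the if block ends with a branching statement:

func firstPositive(nums []int) int {
	for _, n := range nums {
		if n <= 0 {
			continue
		} else { // flagged: if block ends with a continue statement, so drop this else and outdent its block
			return n
		}
	}
	return 0
}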
diff --git a/vendor/github.com/mgechev/revive/rule/time-naming.go b/vendor/github.com/mgechev/revive/rule/time-naming.go
new file mode 100644
index 0000000000..a93f4b5ae0
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/time-naming.go
@@ -0,0 +1,93 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// TimeNamingRule lints time.Duration variables whose names carry a unit-specific suffix.
+type TimeNamingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *TimeNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := &lintTimeNames{file, onFailure}
+
+ file.Pkg.TypeCheck()
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *TimeNamingRule) Name() string {
+ return "time-naming"
+}
+
+type lintTimeNames struct {
+ file *lint.File
+ onFailure func(lint.Failure)
+}
+
+func (w *lintTimeNames) Visit(node ast.Node) ast.Visitor {
+ v, ok := node.(*ast.ValueSpec)
+ if !ok {
+ return w
+ }
+ for _, name := range v.Names {
+ origTyp := w.file.Pkg.TypeOf(name)
+ // Look for time.Duration or *time.Duration;
+ // the latter is common when using flag.Duration.
+ typ := origTyp
+ if pt, ok := typ.(*types.Pointer); ok {
+ typ = pt.Elem()
+ }
+ if !isNamedType(typ, "time", "Duration") {
+ continue
+ }
+ suffix := ""
+ for _, suf := range timeSuffixes {
+ if strings.HasSuffix(name.Name, suf) {
+ suffix = suf
+ break
+ }
+ }
+ if suffix == "" {
+ continue
+ }
+ w.onFailure(lint.Failure{
+ Category: "time",
+ Confidence: 0.9,
+ Node: v,
+ Failure: fmt.Sprintf("var %s is of type %v; don't use unit-specific suffix %q", name.Name, origTyp, suffix),
+ })
+ }
+ return w
+}
+
+// timeSuffixes is a list of name suffixes that imply a time unit.
+// This is not an exhaustive list.
+var timeSuffixes = []string{
+ "Sec", "Secs", "Seconds",
+ "Msec", "Msecs",
+ "Milli", "Millis", "Milliseconds",
+ "Usec", "Usecs", "Microseconds",
+ "MS", "Ms",
+}
+
+func isNamedType(typ types.Type, importPath, name string) bool {
+ n, ok := typ.(*types.Named)
+ if !ok {
+ return false
+ }
+ tn := n.Obj()
+ return tn != nil && tn.Pkg() != nil && tn.Pkg().Path() == importPath && tn.Name() == name
+}
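A minimal, hypothetical package-level sketch of the naming issue this rule reports:

package config // invented package name, for illustration only

import "time"

var pollIntervalSecs = 5 * time.Second // flagged: don't use unit-specific suffix "Secs"

var pollInterval = 5 * time.Second // same value with a suffix-free name; not flagged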
diff --git a/vendor/github.com/mgechev/revive/rule/unexported-return.go b/vendor/github.com/mgechev/revive/rule/unexported-return.go
new file mode 100644
index 0000000000..c9c8a41d38
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unexported-return.go
@@ -0,0 +1,106 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnexportedReturnRule warns when an exported function or method returns a value of an unexported type.
+type UnexportedReturnRule struct{}
+
+// Apply applies the rule to given file.
+func (r *UnexportedReturnRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := lintUnexportedReturn{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ file.Pkg.TypeCheck()
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *UnexportedReturnRule) Name() string {
+ return "unexported-return"
+}
+
+type lintUnexportedReturn struct {
+ file *lint.File
+ fileAst *ast.File
+ onFailure func(lint.Failure)
+}
+
+func (w lintUnexportedReturn) Visit(n ast.Node) ast.Visitor {
+ fn, ok := n.(*ast.FuncDecl)
+ if !ok {
+ return w
+ }
+ if fn.Type.Results == nil {
+ return nil
+ }
+ if !fn.Name.IsExported() {
+ return nil
+ }
+ thing := "func"
+ if fn.Recv != nil && len(fn.Recv.List) > 0 {
+ thing = "method"
+ if !ast.IsExported(receiverType(fn)) {
+ // Don't report exported methods of unexported types,
+ // such as private implementations of sort.Interface.
+ return nil
+ }
+ }
+ for _, ret := range fn.Type.Results.List {
+ typ := w.file.Pkg.TypeOf(ret.Type)
+ if exportedType(typ) {
+ continue
+ }
+ w.onFailure(lint.Failure{
+ Category: "unexported-type-in-api",
+ Node: ret.Type,
+ Confidence: 0.8,
+ Failure: fmt.Sprintf("exported %s %s returns unexported type %s, which can be annoying to use",
+ thing, fn.Name.Name, typ),
+ })
+ break // only flag one
+ }
+ return nil
+}
+
+// exportedType reports whether typ is an exported type.
+// It is imprecise, and will err on the side of returning true,
+// such as for composite types.
+func exportedType(typ types.Type) bool {
+ switch T := typ.(type) {
+ case *types.Named:
+ obj := T.Obj()
+ switch {
+ // Builtin types have no package.
+ case obj.Pkg() == nil:
+ case obj.Exported():
+ default:
+ _, ok := T.Underlying().(*types.Interface)
+ return ok
+ }
+ return true
+ case *types.Map:
+ return exportedType(T.Key()) && exportedType(T.Elem())
+ case interface {
+ Elem() types.Type
+ }: // array, slice, pointer, chan
+ return exportedType(T.Elem())
+ }
+ // Be conservative about other types, such as struct, interface, etc.
+ return true
+}
diff --git a/vendor/github.com/mgechev/revive/rule/unhandled-error.go b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
new file mode 100644
index 0000000000..0e2f628758
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unhandled-error.go
@@ -0,0 +1,120 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnhandledErrorRule warns when the error returned by a function call is not handled.
+type UnhandledErrorRule struct{}
+
+type ignoreListType map[string]struct{}
+
+// Apply applies the rule to given file.
+func (r *UnhandledErrorRule) Apply(file *lint.File, args lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ ignoreList := make(ignoreListType, len(args))
+
+ for _, arg := range args {
+ argStr, ok := arg.(string)
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the unhandled-error rule. Expecting a string, got %T", arg))
+ }
+
+ ignoreList[argStr] = struct{}{}
+ }
+
+ walker := &lintUnhandledErrors{
+ ignoreList: ignoreList,
+ pkg: file.Pkg,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ file.Pkg.TypeCheck()
+ ast.Walk(walker, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *UnhandledErrorRule) Name() string {
+ return "unhandled-error"
+}
+
+type lintUnhandledErrors struct {
+ ignoreList ignoreListType
+ pkg *lint.Package
+ onFailure func(lint.Failure)
+}
+
+// Visit looks for statements that are function calls.
+// If the called function returns a value of type error a failure will be created.
+func (w *lintUnhandledErrors) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.ExprStmt:
+ fCall, ok := n.X.(*ast.CallExpr)
+ if !ok {
+ return nil // not a function call
+ }
+
+ funcType := w.pkg.TypeOf(fCall)
+ if funcType == nil {
+ return nil // skip, type info not available
+ }
+
+ switch t := funcType.(type) {
+ case *types.Named:
+ if !w.isTypeError(t) {
+ return nil // func call does not return an error
+ }
+
+ w.addFailure(fCall)
+ default:
+ retTypes, ok := funcType.Underlying().(*types.Tuple)
+ if !ok {
+ return nil // skip, unable to retrieve return type of the called function
+ }
+
+ if w.returnsAnError(retTypes) {
+ w.addFailure(fCall)
+ }
+ }
+ }
+ return w
+}
+
+func (w *lintUnhandledErrors) addFailure(n *ast.CallExpr) {
+ funcName := gofmt(n.Fun)
+ if _, mustIgnore := w.ignoreList[funcName]; mustIgnore {
+ return
+ }
+
+ w.onFailure(lint.Failure{
+ Category: "bad practice",
+ Confidence: 1,
+ Node: n,
+ Failure: fmt.Sprintf("Unhandled error in call to function %v", funcName),
+ })
+}
+
+func (*lintUnhandledErrors) isTypeError(t *types.Named) bool {
+ const errorTypeName = "_.error"
+
+ return t.Obj().Id() == errorTypeName
+}
+
+func (w *lintUnhandledErrors) returnsAnError(tt *types.Tuple) bool {
+ for i := 0; i < tt.Len(); i++ {
+ nt, ok := tt.At(i).Type().(*types.Named)
+ if ok && w.isTypeError(nt) {
+ return true
+ }
+ }
+ return false
+}
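A made-up example (assumes the os package) of an ignored error return; note that the rule's string arguments act as an ignore list of function names that should not be reported.

func cleanup(path string) {
	os.Remove(path) // flagged: Unhandled error in call to function os.Remove
}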
diff --git a/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go b/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go
new file mode 100644
index 0000000000..732d8a8bb6
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unnecessary-stmt.go
@@ -0,0 +1,107 @@
+package rule
+
+import (
+ "go/ast"
+ "go/token"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnnecessaryStmtRule warns on unnecessary statements.
+type UnnecessaryStmtRule struct{}
+
+// Apply applies the rule to given file.
+func (r *UnnecessaryStmtRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintUnnecessaryStmtRule{onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *UnnecessaryStmtRule) Name() string {
+ return "unnecessary-stmt"
+}
+
+type lintUnnecessaryStmtRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintUnnecessaryStmtRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ if n.Body == nil || n.Type.Results != nil {
+ return w
+ }
+ stmts := n.Body.List
+ if len(stmts) == 0 {
+ return w
+ }
+
+ lastStmt := stmts[len(stmts)-1]
+ rs, ok := lastStmt.(*ast.ReturnStmt)
+ if !ok {
+ return w
+ }
+
+ if len(rs.Results) == 0 {
+ w.newFailure(lastStmt, "omit unnecessary return statement")
+ }
+
+ case *ast.SwitchStmt:
+ w.checkSwitchBody(n.Body)
+ case *ast.TypeSwitchStmt:
+ w.checkSwitchBody(n.Body)
+ case *ast.CaseClause:
+ if n.Body == nil {
+ return w
+ }
+ stmts := n.Body
+ if len(stmts) == 0 {
+ return w
+ }
+
+ lastStmt := stmts[len(stmts)-1]
+ rs, ok := lastStmt.(*ast.BranchStmt)
+ if !ok {
+ return w
+ }
+
+ if rs.Tok == token.BREAK && rs.Label == nil {
+ w.newFailure(lastStmt, "omit unnecessary break at the end of case clause")
+ }
+ }
+
+ return w
+}
+
+func (w lintUnnecessaryStmtRule) checkSwitchBody(b *ast.BlockStmt) {
+ cases := b.List
+ if len(cases) != 1 {
+ return
+ }
+
+ cc, ok := cases[0].(*ast.CaseClause)
+ if !ok {
+ return
+ }
+
+ if len(cc.List) > 1 { // skip cases with multiple expressions
+ return
+ }
+
+ w.newFailure(b, "switch with only one case can be replaced by an if-then")
+}
+
+func (w lintUnnecessaryStmtRule) newFailure(node ast.Node, msg string) {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "style",
+ Failure: msg,
+ })
+}
diff --git a/vendor/github.com/mgechev/revive/rule/unreachable-code.go b/vendor/github.com/mgechev/revive/rule/unreachable-code.go
new file mode 100644
index 0000000000..c81e9e733b
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unreachable-code.go
@@ -0,0 +1,114 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnreachableCodeRule lints unreachable code.
+type UnreachableCodeRule struct{}
+
+// Apply applies the rule to given file.
+func (r *UnreachableCodeRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ var branchingFunctions = map[string]map[string]bool{
+ "os": map[string]bool{"Exit": true},
+ "log": map[string]bool{
+ "Fatal": true,
+ "Fatalf": true,
+ "Fatalln": true,
+ "Panic": true,
+ "Panicf": true,
+ "Panicln": true,
+ },
+ }
+
+ w := lintUnreachableCode{onFailure, branchingFunctions}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *UnreachableCodeRule) Name() string {
+ return "unreachable-code"
+}
+
+type lintUnreachableCode struct {
+ onFailure func(lint.Failure)
+ branchingFunctions map[string]map[string]bool
+}
+
+func (w lintUnreachableCode) Visit(node ast.Node) ast.Visitor {
+ blk, ok := node.(*ast.BlockStmt)
+ if !ok {
+ return w
+ }
+
+ if len(blk.List) < 2 {
+ return w
+ }
+loop:
+ for i, stmt := range blk.List[:len(blk.List)-1] {
+ // println("iterating ", len(blk.List))
+ next := blk.List[i+1]
+ if _, ok := next.(*ast.LabeledStmt); ok {
+ continue // skip if next statement is labeled
+ }
+
+ switch s := stmt.(type) {
+ case *ast.ReturnStmt:
+ w.onFailure(newUnreachableCodeFailure(s))
+ break loop
+ case *ast.BranchStmt:
+ token := s.Tok.String()
+ if token != "fallthrough" {
+ w.onFailure(newUnreachableCodeFailure(s))
+ break loop
+ }
+ case *ast.ExprStmt:
+ ce, ok := s.X.(*ast.CallExpr)
+ if !ok {
+ continue
+ }
+ // it's a function call
+ fc, ok := ce.Fun.(*ast.SelectorExpr)
+ if !ok {
+ continue
+ }
+
+ id, ok := fc.X.(*ast.Ident)
+
+ if !ok {
+ continue
+ }
+ fn := fc.Sel.Name
+ pkg := id.Name
+ if !w.branchingFunctions[pkg][fn] { // it isn't a call to a branching function
+ continue
+ }
+
+ if _, ok := next.(*ast.ReturnStmt); ok { // return statement needed to satisfy function signature
+ continue
+ }
+
+ w.onFailure(newUnreachableCodeFailure(s))
+ break loop
+ }
+ }
+
+ return w
+}
+
+func newUnreachableCodeFailure(node ast.Node) lint.Failure {
+ return lint.Failure{
+ Confidence: 1,
+ Node: node,
+ Category: "logic",
+ Failure: "unreachable code after this statement",
+ }
+}
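An invented fragment (assumes the fmt and log packages) with a statement that can never run, which is what this rule reports:

func exitWith(msg string) {
	log.Fatal(msg) // flagged: unreachable code after this statement
	fmt.Println("never printed")
}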
diff --git a/vendor/github.com/mgechev/revive/rule/unused-param.go b/vendor/github.com/mgechev/revive/rule/unused-param.go
new file mode 100644
index 0000000000..60df908d3d
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unused-param.go
@@ -0,0 +1,102 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnusedParamRule lints unused params in functions.
+type UnusedParamRule struct{}
+
+// Apply applies the rule to given file.
+func (r *UnusedParamRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintUnusedParamRule{onFailure: onFailure}
+
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *UnusedParamRule) Name() string {
+ return "unused-parameter"
+}
+
+type lintUnusedParamRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintUnusedParamRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ params := retrieveNamedParams(n.Type.Params)
+ if len(params) < 1 {
+ return nil // skip, func without parameters
+ }
+
+ if n.Body == nil {
+ return nil // skip, is a function prototype
+ }
+
+ // inspect the func body looking for references to parameters
+ fselect := func(n ast.Node) bool {
+ ident, isAnID := n.(*ast.Ident)
+
+ if !isAnID {
+ return false
+ }
+
+ _, isAParam := params[ident.Obj]
+ if isAParam {
+ params[ident.Obj] = false // mark as used
+ }
+
+ return false
+ }
+ _ = pick(n.Body, fselect, nil)
+
+ for _, p := range n.Type.Params.List {
+ for _, n := range p.Names {
+ if params[n.Obj] {
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: n,
+ Category: "bad practice",
+ Failure: fmt.Sprintf("parameter '%s' seems to be unused, consider removing or renaming it as _", n.Name),
+ })
+ }
+ }
+ }
+
+ return nil // full method body already inspected
+ }
+
+ return w
+}
+
+func retrieveNamedParams(params *ast.FieldList) map[*ast.Object]bool {
+ result := map[*ast.Object]bool{}
+ if params.List == nil {
+ return result
+ }
+
+ for _, p := range params.List {
+ for _, n := range p.Names {
+ if n.Name == "_" {
+ continue
+ }
+
+ result[n.Obj] = true
+ }
+ }
+
+ return result
+}
diff --git a/vendor/github.com/mgechev/revive/rule/unused-receiver.go b/vendor/github.com/mgechev/revive/rule/unused-receiver.go
new file mode 100644
index 0000000000..43eaf83a49
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/unused-receiver.go
@@ -0,0 +1,77 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// UnusedReceiverRule lints method receivers that are not referenced in the method body.
+type UnusedReceiverRule struct{}
+
+// Apply applies the rule to given file.
+func (_ *UnusedReceiverRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintUnusedReceiverRule{onFailure: onFailure}
+
+ ast.Walk(w, file.AST)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (_ *UnusedReceiverRule) Name() string {
+ return "unused-receiver"
+}
+
+type lintUnusedReceiverRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintUnusedReceiverRule) Visit(node ast.Node) ast.Visitor {
+ switch n := node.(type) {
+ case *ast.FuncDecl:
+ if n.Recv == nil {
+ return nil // skip this func decl, not a method
+ }
+
+ rec := n.Recv.List[0] // safe to access only the first (unique) element of the list
+ if len(rec.Names) < 1 {
+ return nil // the receiver is anonymous: func (aType) Foo(...) ...
+ }
+
+ recID := rec.Names[0]
+ if recID.Name == "_" {
+ return nil // the receiver is already named _
+ }
+
+ // inspect the func body looking for references to the receiver id
+ fselect := func(n ast.Node) bool {
+ ident, isAnID := n.(*ast.Ident)
+
+ return isAnID && ident.Obj == recID.Obj
+ }
+ refs2recID := pick(n.Body, fselect, nil)
+
+ if len(refs2recID) > 0 {
+ return nil // the receiver is referenced in the func body
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: recID,
+ Category: "bad practice",
+ Failure: fmt.Sprintf("method receiver '%s' is not referenced in method's body, consider removing or renaming it as _", recID.Name),
+ })
+
+ return nil // full method body already inspected
+ }
+
+ return w
+}
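A sketch with invented names of a method whose receiver is never used, triggering this rule:

type server struct{ addr string }

func (s server) Name() string { // flagged: method receiver 's' is not referenced in method's body, consider removing or renaming it as _
	return "server"
}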
diff --git a/vendor/github.com/mgechev/revive/rule/utils.go b/vendor/github.com/mgechev/revive/rule/utils.go
new file mode 100644
index 0000000000..6ba542b716
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/utils.go
@@ -0,0 +1,191 @@
+package rule
+
+import (
+ "bytes"
+ "fmt"
+ "go/ast"
+ "go/printer"
+ "go/token"
+ "go/types"
+ "regexp"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+const styleGuideBase = "https://golang.org/wiki/CodeReviewComments"
+
+// isBlank returns whether id is the blank identifier "_".
+// If id == nil, the answer is false.
+func isBlank(id *ast.Ident) bool { return id != nil && id.Name == "_" }
+
+func isTest(f *lint.File) bool {
+ return strings.HasSuffix(f.Name, "_test.go")
+}
+
+var commonMethods = map[string]bool{
+ "Error": true,
+ "Read": true,
+ "ServeHTTP": true,
+ "String": true,
+ "Write": true,
+}
+
+func receiverType(fn *ast.FuncDecl) string {
+ switch e := fn.Recv.List[0].Type.(type) {
+ case *ast.Ident:
+ return e.Name
+ case *ast.StarExpr:
+ if id, ok := e.X.(*ast.Ident); ok {
+ return id.Name
+ }
+ }
+ // The parser accepts much more than just the legal forms.
+ return "invalid-type"
+}
+
+var knownNameExceptions = map[string]bool{
+ "LastInsertId": true, // must match database/sql
+ "kWh": true,
+}
+
+func isCgoExported(f *ast.FuncDecl) bool {
+ if f.Recv != nil || f.Doc == nil {
+ return false
+ }
+
+ cgoExport := regexp.MustCompile(fmt.Sprintf("(?m)^//export %s$", regexp.QuoteMeta(f.Name.Name)))
+ for _, c := range f.Doc.List {
+ if cgoExport.MatchString(c.Text) {
+ return true
+ }
+ }
+ return false
+}
+
+var allCapsRE = regexp.MustCompile(`^[A-Z0-9_]+$`)
+
+func isIdent(expr ast.Expr, ident string) bool {
+ id, ok := expr.(*ast.Ident)
+ return ok && id.Name == ident
+}
+
+var zeroLiteral = map[string]bool{
+ "false": true, // bool
+ // runes
+ `'\x00'`: true,
+ `'\000'`: true,
+ // strings
+ `""`: true,
+ "``": true,
+ // numerics
+ "0": true,
+ "0.": true,
+ "0.0": true,
+ "0i": true,
+}
+
+func validType(T types.Type) bool {
+ return T != nil &&
+ T != types.Typ[types.Invalid] &&
+ !strings.Contains(T.String(), "invalid type") // good but not foolproof
+}
+
+func isPkgDot(expr ast.Expr, pkg, name string) bool {
+ sel, ok := expr.(*ast.SelectorExpr)
+ return ok && isIdent(sel.X, pkg) && isIdent(sel.Sel, name)
+}
+
+func srcLine(src []byte, p token.Position) string {
+ // Run to end of line in both directions if not at line start/end.
+ lo, hi := p.Offset, p.Offset+1
+ for lo > 0 && src[lo-1] != '\n' {
+ lo--
+ }
+ for hi < len(src) && src[hi-1] != '\n' {
+ hi++
+ }
+ return string(src[lo:hi])
+}
+
+// pick yields a list of nodes by picking them from a sub-ast with root node n.
+// Nodes are selected by applying the fselect function.
+// The f function is applied to each selected node before inserting it into the final result.
+// If f == nil, it defaults to the identity function (i.e. it returns the node itself).
+func pick(n ast.Node, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node {
+ var result []ast.Node
+
+ if n == nil {
+ return result
+ }
+
+ if f == nil {
+ f = func(n ast.Node) []ast.Node { return []ast.Node{n} }
+ }
+
+ onSelect := func(n ast.Node) {
+ result = append(result, f(n)...)
+ }
+ p := picker{fselect: fselect, onSelect: onSelect}
+ ast.Walk(p, n)
+ return result
+}
+
+func pickFromExpList(l []ast.Expr, fselect func(n ast.Node) bool, f func(n ast.Node) []ast.Node) []ast.Node {
+ result := make([]ast.Node, 0)
+ for _, e := range l {
+ result = append(result, pick(e, fselect, f)...)
+ }
+ return result
+}
+
+type picker struct {
+ fselect func(n ast.Node) bool
+ onSelect func(n ast.Node)
+}
+
+func (p picker) Visit(node ast.Node) ast.Visitor {
+ if p.fselect == nil {
+ return nil
+ }
+
+ if p.fselect(node) {
+ p.onSelect(node)
+ }
+
+ return p
+}
+
+// isBoolOp returns true if the given token corresponds to
+// a bool operator
+func isBoolOp(t token.Token) bool {
+ switch t {
+ case token.LAND, token.LOR, token.EQL, token.NEQ:
+ return true
+ }
+
+ return false
+}
+
+const (
+ trueName = "true"
+ falseName = "false"
+)
+
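+// isExprABooleanLit reports whether n is the predeclared boolean identifier
+// "true" or "false", returning its lexeme.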
+func isExprABooleanLit(n ast.Node) (lexeme string, ok bool) {
+ oper, ok := n.(*ast.Ident)
+
+ if !ok {
+ return "", false
+ }
+
+ return oper.Name, (oper.Name == trueName || oper.Name == falseName)
+}
+
+// gofmt returns a string representation of the expression.
+func gofmt(x ast.Expr) string {
+ buf := bytes.Buffer{}
+ fs := token.NewFileSet()
+ printer.Fprint(&buf, fs, x)
+ return buf.String()
+}
diff --git a/vendor/github.com/mgechev/revive/rule/var-declarations.go b/vendor/github.com/mgechev/revive/rule/var-declarations.go
new file mode 100644
index 0000000000..441132115e
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/var-declarations.go
@@ -0,0 +1,120 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "go/types"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// VarDeclarationsRule reduces redundancies around variable declarations.
+type VarDeclarationsRule struct{}
+
+// Apply applies the rule to given file.
+func (r *VarDeclarationsRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ fileAst := file.AST
+ walker := &lintVarDeclarations{
+ file: file,
+ fileAst: fileAst,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ file.Pkg.TypeCheck()
+ ast.Walk(walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *VarDeclarationsRule) Name() string {
+ return "var-declaration"
+}
+
+type lintVarDeclarations struct {
+ fileAst *ast.File
+ file *lint.File
+ lastGen *ast.GenDecl
+ onFailure func(lint.Failure)
+}
+
+func (w *lintVarDeclarations) Visit(node ast.Node) ast.Visitor {
+ switch v := node.(type) {
+ case *ast.GenDecl:
+ if v.Tok != token.CONST && v.Tok != token.VAR {
+ return nil
+ }
+ w.lastGen = v
+ return w
+ case *ast.ValueSpec:
+ if w.lastGen.Tok == token.CONST {
+ return nil
+ }
+ if len(v.Names) > 1 || v.Type == nil || len(v.Values) == 0 {
+ return nil
+ }
+ rhs := v.Values[0]
+ // An underscore var appears in a common idiom for compile-time interface satisfaction,
+ // as in "var _ Interface = (*Concrete)(nil)".
+ if isIdent(v.Names[0], "_") {
+ return nil
+ }
+ // If the RHS is a zero value, suggest dropping it.
+ zero := false
+ if lit, ok := rhs.(*ast.BasicLit); ok {
+ zero = zeroLiteral[lit.Value]
+ } else if isIdent(rhs, "nil") {
+ zero = true
+ }
+ if zero {
+ w.onFailure(lint.Failure{
+ Confidence: 0.9,
+ Node: rhs,
+ Category: "zero-value",
+ Failure: fmt.Sprintf("should drop = %s from declaration of var %s; it is the zero value", w.file.Render(rhs), v.Names[0]),
+ })
+ return nil
+ }
+ lhsTyp := w.file.Pkg.TypeOf(v.Type)
+ rhsTyp := w.file.Pkg.TypeOf(rhs)
+
+ if !validType(lhsTyp) || !validType(rhsTyp) {
+ // Type checking failed (often due to missing imports).
+ return nil
+ }
+
+ if !types.Identical(lhsTyp, rhsTyp) {
+ // Assignment to a different type is not redundant.
+ return nil
+ }
+
+ // The next two conditions are for suppressing the warning in situations
+ // where we were unable to typecheck.
+
+ // If the LHS type is an interface, don't warn, since it is probably a
+ // concrete type on the RHS. Note that our feeble lexical check here
+ // will only pick up interface{} and other literal interface types;
+ // that covers most of the cases we care to exclude right now.
+ if _, ok := v.Type.(*ast.InterfaceType); ok {
+ return nil
+ }
+ // If the RHS is an untyped const, only warn if the LHS type is its default type.
+ if defType, ok := w.file.IsUntypedConst(rhs); ok && !isIdent(v.Type, defType) {
+ return nil
+ }
+
+ w.onFailure(lint.Failure{
+ Category: "type-inference",
+ Confidence: 0.8,
+ Node: v.Type,
+ Failure: fmt.Sprintf("should omit type %s from declaration of var %s; it will be inferred from the right-hand side", w.file.Render(v.Type), v.Names[0]),
+ })
+ return nil
+ }
+ return w
+}
diff --git a/vendor/github.com/mgechev/revive/rule/var-naming.go b/vendor/github.com/mgechev/revive/rule/var-naming.go
new file mode 100644
index 0000000000..768f65b966
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/var-naming.go
@@ -0,0 +1,230 @@
+package rule
+
+import (
+ "fmt"
+ "go/ast"
+ "go/token"
+ "strings"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// VarNamingRule lints the naming of variables, functions, and other identifiers.
+type VarNamingRule struct{}
+
+// Apply applies the rule to given file.
+func (r *VarNamingRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ var whitelist []string
+ var blacklist []string
+
+ if len(arguments) >= 1 {
+ whitelist = getList(arguments[0], "whitelist")
+ }
+
+ if len(arguments) >= 2 {
+ blacklist = getList(arguments[1], "blacklist")
+ }
+
+ fileAst := file.AST
+ walker := lintNames{
+ file: file,
+ fileAst: fileAst,
+ whitelist: whitelist,
+ blacklist: blacklist,
+ onFailure: func(failure lint.Failure) {
+ failures = append(failures, failure)
+ },
+ }
+
+ // Package names need slightly different handling than other names.
+ if strings.Contains(walker.fileAst.Name.Name, "_") && !strings.HasSuffix(walker.fileAst.Name.Name, "_test") {
+ walker.onFailure(lint.Failure{
+ Failure: "don't use an underscore in package name",
+ Confidence: 1,
+ Node: walker.fileAst,
+ Category: "naming",
+ })
+ }
+
+ ast.Walk(&walker, fileAst)
+
+ return failures
+}
+
+// Name returns the rule name.
+func (r *VarNamingRule) Name() string {
+ return "var-naming"
+}
+
+func checkList(fl *ast.FieldList, thing string, w *lintNames) {
+ if fl == nil {
+ return
+ }
+ for _, f := range fl.List {
+ for _, id := range f.Names {
+ check(id, thing, w)
+ }
+ }
+}
+
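+// check reports a naming failure when the identifier id does not follow Go
+// naming conventions (ALL_CAPS, leading k, underscores, or non-standard initialisms).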
+func check(id *ast.Ident, thing string, w *lintNames) {
+ if id.Name == "_" {
+ return
+ }
+ if knownNameExceptions[id.Name] {
+ return
+ }
+
+ // Handle two common styles from other languages that don't belong in Go.
+ if len(id.Name) >= 5 && allCapsRE.MatchString(id.Name) && strings.Contains(id.Name, "_") {
+ w.onFailure(lint.Failure{
+ Failure: "don't use ALL_CAPS in Go names; use CamelCase",
+ Confidence: 0.8,
+ Node: id,
+ Category: "naming",
+ })
+ return
+ }
+ if len(id.Name) > 2 && id.Name[0] == 'k' && id.Name[1] >= 'A' && id.Name[1] <= 'Z' {
+ should := string(id.Name[1]+'a'-'A') + id.Name[2:]
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("don't use leading k in Go names; %s %s should be %s", thing, id.Name, should),
+ Confidence: 0.8,
+ Node: id,
+ Category: "naming",
+ })
+ }
+
+ should := lint.Name(id.Name, w.whitelist, w.blacklist)
+ if id.Name == should {
+ return
+ }
+
+ if len(id.Name) > 2 && strings.Contains(id.Name[1:], "_") {
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("don't use underscores in Go names; %s %s should be %s", thing, id.Name, should),
+ Confidence: 0.9,
+ Node: id,
+ Category: "naming",
+ })
+ return
+ }
+ w.onFailure(lint.Failure{
+ Failure: fmt.Sprintf("%s %s should be %s", thing, id.Name, should),
+ Confidence: 0.8,
+ Node: id,
+ Category: "naming",
+ })
+}
+
+type lintNames struct {
+ file *lint.File
+ fileAst *ast.File
+ lastGen *ast.GenDecl
+ genDeclMissingComments map[*ast.GenDecl]bool
+ onFailure func(lint.Failure)
+ whitelist []string
+ blacklist []string
+}
+
+func (w *lintNames) Visit(n ast.Node) ast.Visitor {
+ switch v := n.(type) {
+ case *ast.AssignStmt:
+ if v.Tok == token.ASSIGN {
+ return w
+ }
+ for _, exp := range v.Lhs {
+ if id, ok := exp.(*ast.Ident); ok {
+ check(id, "var", w)
+ }
+ }
+ case *ast.FuncDecl:
+ if w.file.IsTest() && (strings.HasPrefix(v.Name.Name, "Example") || strings.HasPrefix(v.Name.Name, "Test") || strings.HasPrefix(v.Name.Name, "Benchmark")) {
+ return w
+ }
+
+ thing := "func"
+ if v.Recv != nil {
+ thing = "method"
+ }
+
+ // Exclude naming warnings for functions that are exported to C but
+ // not exported in the Go API.
+ // See https://github.com/golang/lint/issues/144.
+ if ast.IsExported(v.Name.Name) || !isCgoExported(v) {
+ check(v.Name, thing, w)
+ }
+
+ checkList(v.Type.Params, thing+" parameter", w)
+ checkList(v.Type.Results, thing+" result", w)
+ case *ast.GenDecl:
+ if v.Tok == token.IMPORT {
+ return w
+ }
+ var thing string
+ switch v.Tok {
+ case token.CONST:
+ thing = "const"
+ case token.TYPE:
+ thing = "type"
+ case token.VAR:
+ thing = "var"
+ }
+ for _, spec := range v.Specs {
+ switch s := spec.(type) {
+ case *ast.TypeSpec:
+ check(s.Name, thing, w)
+ case *ast.ValueSpec:
+ for _, id := range s.Names {
+ check(id, thing, w)
+ }
+ }
+ }
+ case *ast.InterfaceType:
+ // Do not check interface method names.
+ // They are often constrained by the method names of concrete types.
+ for _, x := range v.Methods.List {
+ ft, ok := x.Type.(*ast.FuncType)
+ if !ok { // might be an embedded interface name
+ continue
+ }
+ checkList(ft.Params, "interface method parameter", w)
+ checkList(ft.Results, "interface method result", w)
+ }
+ case *ast.RangeStmt:
+ if v.Tok == token.ASSIGN {
+ return w
+ }
+ if id, ok := v.Key.(*ast.Ident); ok {
+ check(id, "range var", w)
+ }
+ if id, ok := v.Value.(*ast.Ident); ok {
+ check(id, "range var", w)
+ }
+ case *ast.StructType:
+ for _, f := range v.Fields.List {
+ for _, id := range f.Names {
+ check(id, "struct field", w)
+ }
+ }
+ }
+ return w
+}
+
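+// getList coerces a rule argument into a slice of strings, panicking if the
+// argument is not a slice or contains non-string elements.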
+func getList(arg interface{}, argName string) []string {
+ temp, ok := arg.([]interface{})
+ if !ok {
+ panic(fmt.Sprintf("Invalid argument to the var-naming rule. Expecting a %s of type slice with initialisms, got %T", argName, arg))
+ }
+ var list []string
+ for _, v := range temp {
+ if val, ok := v.(string); ok {
+ list = append(list, val)
+ } else {
+ panic(fmt.Sprintf("Invalid %s values of the var-naming rule. Expecting slice of strings but got element of type %T", val, arg))
+ }
+ }
+ return list
+}
diff --git a/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
new file mode 100644
index 0000000000..b86929136c
--- /dev/null
+++ b/vendor/github.com/mgechev/revive/rule/waitgroup-by-value.go
@@ -0,0 +1,66 @@
+package rule
+
+import (
+ "go/ast"
+
+ "github.com/mgechev/revive/lint"
+)
+
+// WaitGroupByValueRule lints sync.WaitGroup passed by copy in functions.
+type WaitGroupByValueRule struct{}
+
+// Apply applies the rule to given file.
+func (r *WaitGroupByValueRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
+ var failures []lint.Failure
+
+ onFailure := func(failure lint.Failure) {
+ failures = append(failures, failure)
+ }
+
+ w := lintWaitGroupByValueRule{onFailure: onFailure}
+ ast.Walk(w, file.AST)
+ return failures
+}
+
+// Name returns the rule name.
+func (r *WaitGroupByValueRule) Name() string {
+ return "waitgroup-by-value"
+}
+
+type lintWaitGroupByValueRule struct {
+ onFailure func(lint.Failure)
+}
+
+func (w lintWaitGroupByValueRule) Visit(node ast.Node) ast.Visitor {
+ // look for function declarations
+ fd, ok := node.(*ast.FuncDecl)
+ if !ok {
+ return w
+ }
+
+ // Check all function's parameters
+ for _, field := range fd.Type.Params.List {
+ if !w.isWaitGroup(field.Type) {
+ continue
+ }
+
+ w.onFailure(lint.Failure{
+ Confidence: 1,
+ Node: field,
+ Failure: "sync.WaitGroup passed by value, the function will get a copy of the original one",
+ })
+ }
+
+ return nil
+}
+
+func (lintWaitGroupByValueRule) isWaitGroup(ft ast.Expr) bool {
+ se, ok := ft.(*ast.SelectorExpr)
+ if !ok {
+ return false
+ }
+
+ x, ok := se.X.(*ast.Ident)
+ if !ok { // the qualifier is not a plain identifier, so this cannot be sync.WaitGroup
+ return false
+ }
+ return x.Name == "sync" && se.Sel.Name == "WaitGroup"
+}
diff --git a/vendor/github.com/olekukonko/tablewriter/.gitignore b/vendor/github.com/olekukonko/tablewriter/.gitignore
new file mode 100644
index 0000000000..b66cec635a
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/.gitignore
@@ -0,0 +1,15 @@
+# Created by .ignore support plugin (hsz.mobi)
+### Go template
+# Binaries for programs and plugins
+*.exe
+*.exe~
+*.dll
+*.so
+*.dylib
+
+# Test binary, build with `go test -c`
+*.test
+
+# Output of the go coverage tool, specifically when used with LiteIDE
+*.out
+
diff --git a/vendor/github.com/olekukonko/tablewriter/.travis.yml b/vendor/github.com/olekukonko/tablewriter/.travis.yml
new file mode 100644
index 0000000000..9c64270e2e
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+
+go:
+ - 1.1
+ - 1.2
+ - 1.3
+ - 1.4
+ - 1.5
+ - 1.6
+ - 1.7
+ - 1.8
+ - 1.9
+ - "1.10"
+ - tip
diff --git a/vendor/github.com/olekukonko/tablewriter/LICENSE.md b/vendor/github.com/olekukonko/tablewriter/LICENSE.md
new file mode 100644
index 0000000000..a0769b5c15
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/LICENSE.md
@@ -0,0 +1,19 @@
+Copyright (C) 2014 by Oleku Konko
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/olekukonko/tablewriter/README.md b/vendor/github.com/olekukonko/tablewriter/README.md
new file mode 100644
index 0000000000..cb9b2ef464
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/README.md
@@ -0,0 +1,396 @@
+ASCII Table Writer
+=========
+
+[![Build Status](https://travis-ci.org/olekukonko/tablewriter.png?branch=master)](https://travis-ci.org/olekukonko/tablewriter)
+[![Total views](https://img.shields.io/sourcegraph/rrc/github.com/olekukonko/tablewriter.svg)](https://sourcegraph.com/github.com/olekukonko/tablewriter)
+[![Godoc](https://godoc.org/github.com/olekukonko/tablewriter?status.svg)](https://godoc.org/github.com/olekukonko/tablewriter)
+
+Generate ASCII tables on the fly ... Installation is as simple as
+
+ go get github.com/olekukonko/tablewriter
+
+
+#### Features
+- Automatic Padding
+- Support Multiple Lines
+- Supports Alignment
+- Support Custom Separators
+- Automatic Alignment of numbers & percentage
+- Write directly to HTTP, files, etc. via `io.Writer`
+- Read directly from CSV file
+- Optional row line via `SetRowLine`
+- Normalise table header
+- Make CSV Headers optional
+- Enable or disable table border
+- Set custom footer support
+- Optional identical cells merging
+- Set custom caption
+- Optional reflowing of paragraphs in multi-line cells
+
+#### Example 1 - Basic
+```go
+data := [][]string{
+ []string{"A", "The Good", "500"},
+ []string{"B", "The Very very Bad Man", "288"},
+ []string{"C", "The Ugly", "120"},
+ []string{"D", "The Gopher", "800"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Name", "Sign", "Rating"})
+
+for _, v := range data {
+ table.Append(v)
+}
+table.Render() // Send output
+```
+
+##### Output 1
+```
++------+-----------------------+--------+
+| NAME | SIGN | RATING |
++------+-----------------------+--------+
+| A | The Good | 500 |
+| B | The Very very Bad Man | 288 |
+| C | The Ugly | 120 |
+| D | The Gopher | 800 |
++------+-----------------------+--------+
+```
+
+#### Example 2 - Without Border / Footer / Bulk Append
+```go
+data := [][]string{
+ []string{"1/1/2014", "Domain name", "2233", "$10.98"},
+ []string{"1/1/2014", "January Hosting", "2233", "$54.95"},
+ []string{"1/4/2014", "February Hosting", "2233", "$51.00"},
+ []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
+table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer
+table.SetBorder(false) // Set Border to false
+table.AppendBulk(data) // Add Bulk Data
+table.Render()
+```
+
+##### Output 2
+```
+
+ DATE | DESCRIPTION | CV2 | AMOUNT
+-----------+--------------------------+-------+----------
+ 1/1/2014 | Domain name | 2233 | $10.98
+ 1/1/2014 | January Hosting | 2233 | $54.95
+ 1/4/2014 | February Hosting | 2233 | $51.00
+ 1/4/2014 | February Extra Bandwidth | 2233 | $30.00
+-----------+--------------------------+-------+----------
+ TOTAL | $146 93
+ --------+----------
+
+```
+
+
+#### Example 3 - CSV
+```go
+table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test_info.csv", true)
+table.SetAlignment(tablewriter.ALIGN_LEFT) // Set Alignment
+table.Render()
+```
+
+##### Output 3
+```
++----------+--------------+------+-----+---------+----------------+
+| FIELD | TYPE | NULL | KEY | DEFAULT | EXTRA |
++----------+--------------+------+-----+---------+----------------+
+| user_id | smallint(5) | NO | PRI | NULL | auto_increment |
+| username | varchar(10) | NO | | NULL | |
+| password | varchar(100) | NO | | NULL | |
++----------+--------------+------+-----+---------+----------------+
+```
+
+#### Example 4 - Custom Separator
+```go
+table, _ := tablewriter.NewCSV(os.Stdout, "testdata/test.csv", true)
+table.SetRowLine(true) // Enable row line
+
+// Change table lines
+table.SetCenterSeparator("*")
+table.SetColumnSeparator("╪")
+table.SetRowSeparator("-")
+
+table.SetAlignment(tablewriter.ALIGN_LEFT)
+table.Render()
+```
+
+##### Output 4
+```
+*------------*-----------*---------*
+╪ FIRST NAME ╪ LAST NAME ╪ SSN ╪
+*------------*-----------*---------*
+╪ John ╪ Barry ╪ 123456 ╪
+*------------*-----------*---------*
+╪ Kathy ╪ Smith ╪ 687987 ╪
+*------------*-----------*---------*
+╪ Bob ╪ McCornick ╪ 3979870 ╪
+*------------*-----------*---------*
+```
+
+#### Example 5 - Markdown Format
+```go
+data := [][]string{
+ []string{"1/1/2014", "Domain name", "2233", "$10.98"},
+ []string{"1/1/2014", "January Hosting", "2233", "$54.95"},
+ []string{"1/4/2014", "February Hosting", "2233", "$51.00"},
+ []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
+table.SetBorders(tablewriter.Border{Left: true, Top: false, Right: true, Bottom: false})
+table.SetCenterSeparator("|")
+table.AppendBulk(data) // Add Bulk Data
+table.Render()
+```
+
+##### Output 5
+```
+| DATE | DESCRIPTION | CV2 | AMOUNT |
+|----------|--------------------------|------|--------|
+| 1/1/2014 | Domain name | 2233 | $10.98 |
+| 1/1/2014 | January Hosting | 2233 | $54.95 |
+| 1/4/2014 | February Hosting | 2233 | $51.00 |
+| 1/4/2014 | February Extra Bandwidth | 2233 | $30.00 |
+```
+
+#### Example 6 - Identical cells merging
+```go
+data := [][]string{
+ []string{"1/1/2014", "Domain name", "1234", "$10.98"},
+ []string{"1/1/2014", "January Hosting", "2345", "$54.95"},
+ []string{"1/4/2014", "February Hosting", "3456", "$51.00"},
+ []string{"1/4/2014", "February Extra Bandwidth", "4567", "$30.00"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
+table.SetFooter([]string{"", "", "Total", "$146.93"})
+table.SetAutoMergeCells(true)
+table.SetRowLine(true)
+table.AppendBulk(data)
+table.Render()
+```
+
+##### Output 6
+```
++----------+--------------------------+-------+---------+
+| DATE | DESCRIPTION | CV2 | AMOUNT |
++----------+--------------------------+-------+---------+
+| 1/1/2014 | Domain name | 1234 | $10.98 |
++ +--------------------------+-------+---------+
+| | January Hosting | 2345 | $54.95 |
++----------+--------------------------+-------+---------+
+| 1/4/2014 | February Hosting | 3456 | $51.00 |
++ +--------------------------+-------+---------+
+| | February Extra Bandwidth | 4567 | $30.00 |
++----------+--------------------------+-------+---------+
+| TOTAL | $146 93 |
++----------+--------------------------+-------+---------+
+```
+
+
+#### Table with color
+```go
+data := [][]string{
+ []string{"1/1/2014", "Domain name", "2233", "$10.98"},
+ []string{"1/1/2014", "January Hosting", "2233", "$54.95"},
+ []string{"1/4/2014", "February Hosting", "2233", "$51.00"},
+ []string{"1/4/2014", "February Extra Bandwidth", "2233", "$30.00"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Date", "Description", "CV2", "Amount"})
+table.SetFooter([]string{"", "", "Total", "$146.93"}) // Add Footer
+table.SetBorder(false) // Set Border to false
+
+table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor},
+ tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor},
+ tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor},
+ tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor})
+
+table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor})
+
+table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{},
+ tablewriter.Colors{tablewriter.Bold},
+ tablewriter.Colors{tablewriter.FgHiRedColor})
+
+table.AppendBulk(data)
+table.Render()
+```
+
+#### Table with color Output
+![Table with Color](https://cloud.githubusercontent.com/assets/6460392/21101956/bbc7b356-c0a1-11e6-9f36-dba694746efc.png)
+
+#### Example 7 - Table Cells with Color
+
+Individual Cell Colors from `func Rich` take precedence over Column Colors
+
+```go
+data := [][]string{
+ []string{"Test1Merge", "HelloCol2 - 1", "HelloCol3 - 1", "HelloCol4 - 1"},
+ []string{"Test1Merge", "HelloCol2 - 2", "HelloCol3 - 2", "HelloCol4 - 2"},
+ []string{"Test1Merge", "HelloCol2 - 3", "HelloCol3 - 3", "HelloCol4 - 3"},
+ []string{"Test2Merge", "HelloCol2 - 4", "HelloCol3 - 4", "HelloCol4 - 4"},
+ []string{"Test2Merge", "HelloCol2 - 5", "HelloCol3 - 5", "HelloCol4 - 5"},
+ []string{"Test2Merge", "HelloCol2 - 6", "HelloCol3 - 6", "HelloCol4 - 6"},
+ []string{"Test2Merge", "HelloCol2 - 7", "HelloCol3 - 7", "HelloCol4 - 7"},
+ []string{"Test3Merge", "HelloCol2 - 8", "HelloCol3 - 8", "HelloCol4 - 8"},
+ []string{"Test3Merge", "HelloCol2 - 9", "HelloCol3 - 9", "HelloCol4 - 9"},
+ []string{"Test3Merge", "HelloCol2 - 10", "HelloCol3 -10", "HelloCol4 - 10"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Col1", "Col2", "Col3", "Col4"})
+table.SetFooter([]string{"", "", "Footer3", "Footer4"})
+table.SetBorder(false)
+
+table.SetHeaderColor(tablewriter.Colors{tablewriter.Bold, tablewriter.BgGreenColor},
+ tablewriter.Colors{tablewriter.FgHiRedColor, tablewriter.Bold, tablewriter.BgBlackColor},
+ tablewriter.Colors{tablewriter.BgRedColor, tablewriter.FgWhiteColor},
+ tablewriter.Colors{tablewriter.BgCyanColor, tablewriter.FgWhiteColor})
+
+table.SetColumnColor(tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiRedColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiBlackColor},
+ tablewriter.Colors{tablewriter.Bold, tablewriter.FgBlackColor})
+
+table.SetFooterColor(tablewriter.Colors{}, tablewriter.Colors{},
+ tablewriter.Colors{tablewriter.Bold},
+ tablewriter.Colors{tablewriter.FgHiRedColor})
+
+colorData1 := []string{"TestCOLOR1Merge", "HelloCol2 - COLOR1", "HelloCol3 - COLOR1", "HelloCol4 - COLOR1"}
+colorData2 := []string{"TestCOLOR2Merge", "HelloCol2 - COLOR2", "HelloCol3 - COLOR2", "HelloCol4 - COLOR2"}
+
+for i, row := range data {
+ if i == 4 {
+ table.Rich(colorData1, []tablewriter.Colors{tablewriter.Colors{}, tablewriter.Colors{tablewriter.Normal, tablewriter.FgCyanColor}, tablewriter.Colors{tablewriter.Bold, tablewriter.FgWhiteColor}, tablewriter.Colors{}})
+ table.Rich(colorData2, []tablewriter.Colors{tablewriter.Colors{tablewriter.Normal, tablewriter.FgMagentaColor}, tablewriter.Colors{}, tablewriter.Colors{tablewriter.Bold, tablewriter.BgRedColor}, tablewriter.Colors{tablewriter.FgHiGreenColor, tablewriter.Italic, tablewriter.BgHiCyanColor}})
+ }
+ table.Append(row)
+}
+
+table.SetAutoMergeCells(true)
+table.Render()
+
+```
+
+##### Table cells with color Output
+![Table cells with Color](https://user-images.githubusercontent.com/9064687/63969376-bcd88d80-ca6f-11e9-9466-c3d954700b25.png)
+
+#### Example 8 - Set table caption
+```go
+data := [][]string{
+ []string{"A", "The Good", "500"},
+ []string{"B", "The Very very Bad Man", "288"},
+ []string{"C", "The Ugly", "120"},
+ []string{"D", "The Gopher", "800"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Name", "Sign", "Rating"})
+table.SetCaption(true, "Movie ratings.")
+
+for _, v := range data {
+ table.Append(v)
+}
+table.Render() // Send output
+```
+
+Note: Caption text will wrap to the total width of the rendered table.
+
+##### Output 8
+```
++------+-----------------------+--------+
+| NAME | SIGN | RATING |
++------+-----------------------+--------+
+| A | The Good | 500 |
+| B | The Very very Bad Man | 288 |
+| C | The Ugly | 120 |
+| D | The Gopher | 800 |
++------+-----------------------+--------+
+Movie ratings.
+```
+
+#### Example 9 - Set NoWhiteSpace and TablePadding option
+```go
+data := [][]string{
+ {"node1.example.com", "Ready", "compute", "1.11"},
+ {"node2.example.com", "Ready", "compute", "1.11"},
+ {"node3.example.com", "Ready", "compute", "1.11"},
+ {"node4.example.com", "NotReady", "compute", "1.11"},
+}
+
+table := tablewriter.NewWriter(os.Stdout)
+table.SetHeader([]string{"Name", "Status", "Role", "Version"})
+table.SetAutoWrapText(false)
+table.SetAutoFormatHeaders(true)
+table.SetHeaderAlignment(tablewriter.ALIGN_LEFT)
+table.SetAlignment(tablewriter.ALIGN_LEFT)
+table.SetCenterSeparator("")
+table.SetColumnSeparator("")
+table.SetRowSeparator("")
+table.SetHeaderLine(false)
+table.SetBorder(false)
+table.SetTablePadding("\t") // pad with tabs
+table.SetNoWhiteSpace(true)
+table.AppendBulk(data) // Add Bulk Data
+table.Render()
+```
+
+##### Output 9
+```
+NAME STATUS ROLE VERSION
+node1.example.com Ready compute 1.11
+node2.example.com Ready compute 1.11
+node3.example.com Ready compute 1.11
+node4.example.com NotReady compute 1.11
+```
+
+#### Render table into a string
+
+Instead of rendering the table to `os.Stdout` you can also render it into a string. Go 1.10 introduced the `strings.Builder` type, which implements the `io.Writer` interface and can therefore be used for this task. Example:
+
+```go
+package main
+
+import (
+ "strings"
+ "fmt"
+
+ "github.com/olekukonko/tablewriter"
+)
+
+func main() {
+ tableString := &strings.Builder{}
+ table := tablewriter.NewWriter(tableString)
+
+ /*
+ * Code to fill the table
+ */
+
+ table.Render()
+
+ fmt.Println(tableString.String())
+}
+```
+
+#### TODO
+- ~~Import Directly from CSV~~ - `done`
+- ~~Support for `SetFooter`~~ - `done`
+- ~~Support for `SetBorder`~~ - `done`
+- ~~Support table with uneven rows~~ - `done`
+- ~~Support custom alignment~~
+- General Improvement & Optimisation
+- `NewHTML` Parse table from HTML
diff --git a/vendor/github.com/olekukonko/tablewriter/csv.go b/vendor/github.com/olekukonko/tablewriter/csv.go
new file mode 100644
index 0000000000..98878303bc
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/csv.go
@@ -0,0 +1,52 @@
+// Copyright 2014 Oleku Konko All rights reserved.
+// Use of this source code is governed by a MIT
+// license that can be found in the LICENSE file.
+
+// This module is a Table Writer API for the Go Programming Language.
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+package tablewriter
+
+import (
+ "encoding/csv"
+ "io"
+ "os"
+)
+
+// Start A new table by importing from a CSV file
+// Takes io.Writer and csv File name
+func NewCSV(writer io.Writer, fileName string, hasHeader bool) (*Table, error) {
+ file, err := os.Open(fileName)
+ if err != nil {
+ return &Table{}, err
+ }
+ defer file.Close()
+ csvReader := csv.NewReader(file)
+ t, err := NewCSVReader(writer, csvReader, hasHeader)
+ return t, err
+}
+
+// Start a New Table Writer with csv.Reader
+// This enables customisation such as reader.Comma = ';'
+// See http://golang.org/src/pkg/encoding/csv/reader.go?s=3213:3671#L94
+func NewCSVReader(writer io.Writer, csvReader *csv.Reader, hasHeader bool) (*Table, error) {
+ t := NewWriter(writer)
+ if hasHeader {
+ // Read the first row
+ headers, err := csvReader.Read()
+ if err != nil {
+ return &Table{}, err
+ }
+ t.SetHeader(headers)
+ }
+ for {
+ record, err := csvReader.Read()
+ if err == io.EOF {
+ break
+ } else if err != nil {
+ return &Table{}, err
+ }
+ t.Append(record)
+ }
+ return t, nil
+}
diff --git a/vendor/github.com/olekukonko/tablewriter/go.mod b/vendor/github.com/olekukonko/tablewriter/go.mod
new file mode 100644
index 0000000000..0430d99b01
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/go.mod
@@ -0,0 +1,5 @@
+module github.com/olekukonko/tablewriter
+
+go 1.12
+
+require github.com/mattn/go-runewidth v0.0.7
diff --git a/vendor/github.com/olekukonko/tablewriter/go.sum b/vendor/github.com/olekukonko/tablewriter/go.sum
new file mode 100644
index 0000000000..1e7b9aabda
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/go.sum
@@ -0,0 +1,2 @@
+github.com/mattn/go-runewidth v0.0.7 h1:Ei8KR0497xHyKJPAv59M1dkC+rOZCMBJ+t3fZ+twI54=
+github.com/mattn/go-runewidth v0.0.7/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
diff --git a/vendor/github.com/olekukonko/tablewriter/table.go b/vendor/github.com/olekukonko/tablewriter/table.go
new file mode 100644
index 0000000000..cf63eadfc4
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/table.go
@@ -0,0 +1,941 @@
+// Copyright 2014 Oleku Konko All rights reserved.
+// Use of this source code is governed by a MIT
+// license that can be found in the LICENSE file.
+
+// This module is a Table Writer API for the Go Programming Language.
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+// Create & Generate text based table
+package tablewriter
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "regexp"
+ "strings"
+)
+
+const (
+ MAX_ROW_WIDTH = 30
+)
+
+const (
+ CENTER = "+"
+ ROW = "-"
+ COLUMN = "|"
+ SPACE = " "
+ NEWLINE = "\n"
+)
+
+const (
+ ALIGN_DEFAULT = iota
+ ALIGN_CENTER
+ ALIGN_RIGHT
+ ALIGN_LEFT
+)
+
+var (
+ decimal = regexp.MustCompile(`^-?(?:\d{1,3}(?:,\d{3})*|\d+)(?:\.\d+)?$`)
+ percent = regexp.MustCompile(`^-?\d+\.?\d*%$`)
+)
+
+type Border struct {
+ Left bool
+ Right bool
+ Top bool
+ Bottom bool
+}
+
+type Table struct {
+ out io.Writer
+ rows [][]string
+ lines [][][]string
+ cs map[int]int
+ rs map[int]int
+ headers [][]string
+ footers [][]string
+ caption bool
+ captionText string
+ autoFmt bool
+ autoWrap bool
+ reflowText bool
+ mW int
+ pCenter string
+ pRow string
+ pColumn string
+ tColumn int
+ tRow int
+ hAlign int
+ fAlign int
+ align int
+ newLine string
+ rowLine bool
+ autoMergeCells bool
+ noWhiteSpace bool
+ tablePadding string
+ hdrLine bool
+ borders Border
+ colSize int
+ headerParams []string
+ columnsParams []string
+ footerParams []string
+ columnsAlign []int
+}
+
+// Start New Table
+// Take io.Writer Directly
+func NewWriter(writer io.Writer) *Table {
+ t := &Table{
+ out: writer,
+ rows: [][]string{},
+ lines: [][][]string{},
+ cs: make(map[int]int),
+ rs: make(map[int]int),
+ headers: [][]string{},
+ footers: [][]string{},
+ caption: false,
+ captionText: "Table caption.",
+ autoFmt: true,
+ autoWrap: true,
+ reflowText: true,
+ mW: MAX_ROW_WIDTH,
+ pCenter: CENTER,
+ pRow: ROW,
+ pColumn: COLUMN,
+ tColumn: -1,
+ tRow: -1,
+ hAlign: ALIGN_DEFAULT,
+ fAlign: ALIGN_DEFAULT,
+ align: ALIGN_DEFAULT,
+ newLine: NEWLINE,
+ rowLine: false,
+ hdrLine: true,
+ borders: Border{Left: true, Right: true, Bottom: true, Top: true},
+ colSize: -1,
+ headerParams: []string{},
+ columnsParams: []string{},
+ footerParams: []string{},
+ columnsAlign: []int{}}
+ return t
+}
+
+// Render table output
+func (t *Table) Render() {
+ if t.borders.Top {
+ t.printLine(true)
+ }
+ t.printHeading()
+ if t.autoMergeCells {
+ t.printRowsMergeCells()
+ } else {
+ t.printRows()
+ }
+ if !t.rowLine && t.borders.Bottom {
+ t.printLine(true)
+ }
+ t.printFooter()
+
+ if t.caption {
+ t.printCaption()
+ }
+}
+
+const (
+ headerRowIdx = -1
+ footerRowIdx = -2
+)
+
+// Set table header
+func (t *Table) SetHeader(keys []string) {
+ t.colSize = len(keys)
+ for i, v := range keys {
+ lines := t.parseDimension(v, i, headerRowIdx)
+ t.headers = append(t.headers, lines)
+ }
+}
+
+// Set table Footer
+func (t *Table) SetFooter(keys []string) {
+ //t.colSize = len(keys)
+ for i, v := range keys {
+ lines := t.parseDimension(v, i, footerRowIdx)
+ t.footers = append(t.footers, lines)
+ }
+}
+
+// Set table Caption
+func (t *Table) SetCaption(caption bool, captionText ...string) {
+ t.caption = caption
+ if len(captionText) == 1 {
+ t.captionText = captionText[0]
+ }
+}
+
+// Turn header autoformatting on/off. Default is on (true).
+func (t *Table) SetAutoFormatHeaders(auto bool) {
+ t.autoFmt = auto
+}
+
+// Turn automatic multiline text adjustment on/off. Default is on (true).
+func (t *Table) SetAutoWrapText(auto bool) {
+ t.autoWrap = auto
+}
+
+// Turn automatic reflowing of multiline text when rewrapping on/off. Default is on (true).
+func (t *Table) SetReflowDuringAutoWrap(auto bool) {
+ t.reflowText = auto
+}
+
+// Set the Default column width
+func (t *Table) SetColWidth(width int) {
+ t.mW = width
+}
+
+// Set the minimal width for a column
+func (t *Table) SetColMinWidth(column int, width int) {
+ t.cs[column] = width
+}
+
+// Set the Column Separator
+func (t *Table) SetColumnSeparator(sep string) {
+ t.pColumn = sep
+}
+
+// Set the Row Separator
+func (t *Table) SetRowSeparator(sep string) {
+ t.pRow = sep
+}
+
+// Set the center Separator
+func (t *Table) SetCenterSeparator(sep string) {
+ t.pCenter = sep
+}
+
+// Set Header Alignment
+func (t *Table) SetHeaderAlignment(hAlign int) {
+ t.hAlign = hAlign
+}
+
+// Set Footer Alignment
+func (t *Table) SetFooterAlignment(fAlign int) {
+ t.fAlign = fAlign
+}
+
+// Set Table Alignment
+func (t *Table) SetAlignment(align int) {
+ t.align = align
+}
+
+// Set No White Space
+func (t *Table) SetNoWhiteSpace(allow bool) {
+ t.noWhiteSpace = allow
+}
+
+// Set Table Padding
+func (t *Table) SetTablePadding(padding string) {
+ t.tablePadding = padding
+}
+
+func (t *Table) SetColumnAlignment(keys []int) {
+ for _, v := range keys {
+ switch v {
+ case ALIGN_CENTER:
+ break
+ case ALIGN_LEFT:
+ break
+ case ALIGN_RIGHT:
+ break
+ default:
+ v = ALIGN_DEFAULT
+ }
+ t.columnsAlign = append(t.columnsAlign, v)
+ }
+}
+
+// Set New Line
+func (t *Table) SetNewLine(nl string) {
+ t.newLine = nl
+}
+
+// Set Header Line
+// This would enable / disable a line after the header
+func (t *Table) SetHeaderLine(line bool) {
+ t.hdrLine = line
+}
+
+// Set Row Line
+// This would enable / disable a line on each row of the table
+func (t *Table) SetRowLine(line bool) {
+ t.rowLine = line
+}
+
+// Set Auto Merge Cells
+// This would enable / disable the merge of cells with identical values
+func (t *Table) SetAutoMergeCells(auto bool) {
+ t.autoMergeCells = auto
+}
+
+// Set Table Border
+// This would enable / disable line around the table
+func (t *Table) SetBorder(border bool) {
+ t.SetBorders(Border{border, border, border, border})
+}
+
+func (t *Table) SetBorders(border Border) {
+ t.borders = border
+}
+
+// Append row to table
+func (t *Table) Append(row []string) {
+ rowSize := len(t.headers)
+ if rowSize > t.colSize {
+ t.colSize = rowSize
+ }
+
+ n := len(t.lines)
+ line := [][]string{}
+ for i, v := range row {
+
+ // Detect string width
+ // Detect String height
+ // Break strings into words
+ out := t.parseDimension(v, i, n)
+
+ // Append broken words
+ line = append(line, out)
+ }
+ t.lines = append(t.lines, line)
+}
+
+// Append row to table with color attributes
+func (t *Table) Rich(row []string, colors []Colors) {
+ rowSize := len(t.headers)
+ if rowSize > t.colSize {
+ t.colSize = rowSize
+ }
+
+ n := len(t.lines)
+ line := [][]string{}
+ for i, v := range row {
+
+ // Detect string width
+ // Detect String height
+ // Break strings into words
+ out := t.parseDimension(v, i, n)
+
+ if len(colors) > i {
+ color := colors[i]
+ out[0] = format(out[0], color)
+ }
+
+ // Append broken words
+ line = append(line, out)
+ }
+ t.lines = append(t.lines, line)
+}
+
+// Allow Support for Bulk Append
+// Eliminates repeated for loops
+func (t *Table) AppendBulk(rows [][]string) {
+ for _, row := range rows {
+ t.Append(row)
+ }
+}
+
+// NumLines to get the number of lines
+func (t *Table) NumLines() int {
+ return len(t.lines)
+}
+
+// Clear rows
+func (t *Table) ClearRows() {
+ t.lines = [][][]string{}
+}
+
+// Clear footer
+func (t *Table) ClearFooter() {
+ t.footers = [][]string{}
+}
+
+// Center based on position and border.
+func (t *Table) center(i int) string {
+ if i == -1 && !t.borders.Left {
+ return t.pRow
+ }
+
+ if i == len(t.cs)-1 && !t.borders.Right {
+ return t.pRow
+ }
+
+ return t.pCenter
+}
+
+// Print line based on row width
+func (t *Table) printLine(nl bool) {
+ fmt.Fprint(t.out, t.center(-1))
+ for i := 0; i < len(t.cs); i++ {
+ v := t.cs[i]
+ fmt.Fprintf(t.out, "%s%s%s%s",
+ t.pRow,
+ strings.Repeat(string(t.pRow), v),
+ t.pRow,
+ t.center(i))
+ }
+ if nl {
+ fmt.Fprint(t.out, t.newLine)
+ }
+}
+
+// Print line based on row width with or without cell separator
+func (t *Table) printLineOptionalCellSeparators(nl bool, displayCellSeparator []bool) {
+ fmt.Fprint(t.out, t.pCenter)
+ for i := 0; i < len(t.cs); i++ {
+ v := t.cs[i]
+ if i > len(displayCellSeparator) || displayCellSeparator[i] {
+ // Display the cell separator
+ fmt.Fprintf(t.out, "%s%s%s%s",
+ t.pRow,
+ strings.Repeat(string(t.pRow), v),
+ t.pRow,
+ t.pCenter)
+ } else {
+ // Don't display the cell separator for this cell
+ fmt.Fprintf(t.out, "%s%s",
+ strings.Repeat(" ", v+2),
+ t.pCenter)
+ }
+ }
+ if nl {
+ fmt.Fprint(t.out, t.newLine)
+ }
+}
+
+// Return the PadRight function if align is left, PadLeft if align is right,
+// and Pad by default
+func pad(align int) func(string, string, int) string {
+ padFunc := Pad
+ switch align {
+ case ALIGN_LEFT:
+ padFunc = PadRight
+ case ALIGN_RIGHT:
+ padFunc = PadLeft
+ }
+ return padFunc
+}
+
+// Print heading information
+func (t *Table) printHeading() {
+ // Check if headers is available
+ if len(t.headers) < 1 {
+ return
+ }
+
+ // Identify last column
+ end := len(t.cs) - 1
+
+ // Get pad function
+ padFunc := pad(t.hAlign)
+
+ // Checking for ANSI escape sequences for header
+ is_esc_seq := false
+ if len(t.headerParams) > 0 {
+ is_esc_seq = true
+ }
+
+ // Maximum height.
+ max := t.rs[headerRowIdx]
+
+ // Print Heading
+ for x := 0; x < max; x++ {
+ // Check if border is set
+ // Replace with space if not set
+ if !t.noWhiteSpace {
+ fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE))
+ }
+
+ for y := 0; y <= end; y++ {
+ v := t.cs[y]
+ h := ""
+
+ if y < len(t.headers) && x < len(t.headers[y]) {
+ h = t.headers[y][x]
+ }
+ if t.autoFmt {
+ h = Title(h)
+ }
+ pad := ConditionString((y == end && !t.borders.Left), SPACE, t.pColumn)
+ if t.noWhiteSpace {
+ pad = ConditionString((y == end && !t.borders.Left), SPACE, t.tablePadding)
+ }
+ if is_esc_seq {
+ if !t.noWhiteSpace {
+ fmt.Fprintf(t.out, " %s %s",
+ format(padFunc(h, SPACE, v),
+ t.headerParams[y]), pad)
+ } else {
+ fmt.Fprintf(t.out, "%s %s",
+ format(padFunc(h, SPACE, v),
+ t.headerParams[y]), pad)
+ }
+ } else {
+ if !t.noWhiteSpace {
+ fmt.Fprintf(t.out, " %s %s",
+ padFunc(h, SPACE, v),
+ pad)
+ } else {
+ // the spaces between breaks the kube formatting
+ fmt.Fprintf(t.out, "%s%s",
+ padFunc(h, SPACE, v),
+ pad)
+ }
+ }
+ }
+ // Next line
+ fmt.Fprint(t.out, t.newLine)
+ }
+ if t.hdrLine {
+ t.printLine(true)
+ }
+}
+
+// Print footer information
+func (t *Table) printFooter() {
+ // Check if footers are available
+ if len(t.footers) < 1 {
+ return
+ }
+
+ // Only print line if border is not set
+ if !t.borders.Bottom {
+ t.printLine(true)
+ }
+
+ // Identify last column
+ end := len(t.cs) - 1
+
+ // Get pad function
+ padFunc := pad(t.fAlign)
+
+ // Checking for ANSI escape sequences for footer
+ is_esc_seq := false
+ if len(t.footerParams) > 0 {
+ is_esc_seq = true
+ }
+
+ // Maximum height.
+ max := t.rs[footerRowIdx]
+
+ // Print Footer
+ erasePad := make([]bool, len(t.footers))
+ for x := 0; x < max; x++ {
+ // Check if border is set
+ // Replace with space if not set
+ fmt.Fprint(t.out, ConditionString(t.borders.Bottom, t.pColumn, SPACE))
+
+ for y := 0; y <= end; y++ {
+ v := t.cs[y]
+ f := ""
+ if y < len(t.footers) && x < len(t.footers[y]) {
+ f = t.footers[y][x]
+ }
+ if t.autoFmt {
+ f = Title(f)
+ }
+ pad := ConditionString((y == end && !t.borders.Top), SPACE, t.pColumn)
+
+ if erasePad[y] || (x == 0 && len(f) == 0) {
+ pad = SPACE
+ erasePad[y] = true
+ }
+
+ if is_esc_seq {
+ fmt.Fprintf(t.out, " %s %s",
+ format(padFunc(f, SPACE, v),
+ t.footerParams[y]), pad)
+ } else {
+ fmt.Fprintf(t.out, " %s %s",
+ padFunc(f, SPACE, v),
+ pad)
+ }
+
+ //fmt.Fprintf(t.out, " %s %s",
+ // padFunc(f, SPACE, v),
+ // pad)
+ }
+ // Next line
+ fmt.Fprint(t.out, t.newLine)
+ //t.printLine(true)
+ }
+
+ hasPrinted := false
+
+ for i := 0; i <= end; i++ {
+ v := t.cs[i]
+ pad := t.pRow
+ center := t.pCenter
+ length := len(t.footers[i][0])
+
+ if length > 0 {
+ hasPrinted = true
+ }
+
+ // Set center to be space if length is 0
+ if length == 0 && !t.borders.Right {
+ center = SPACE
+ }
+
+ // Print first junction
+ if i == 0 {
+ if length > 0 && !t.borders.Left {
+ center = t.pRow
+ }
+ fmt.Fprint(t.out, center)
+ }
+
+ // Pad with space if length is 0
+ if length == 0 {
+ pad = SPACE
+ }
+ // Ignore left space as it has been printed before
+ if hasPrinted || t.borders.Left {
+ pad = t.pRow
+ center = t.pCenter
+ }
+
+ // Change Center end position
+ if center != SPACE {
+ if i == end && !t.borders.Right {
+ center = t.pRow
+ }
+ }
+
+ // Change Center start position
+ if center == SPACE {
+ if i < end && len(t.footers[i+1][0]) != 0 {
+ if !t.borders.Left {
+ center = t.pRow
+ } else {
+ center = t.pCenter
+ }
+ }
+ }
+
+ // Print the footer
+ fmt.Fprintf(t.out, "%s%s%s%s",
+ pad,
+ strings.Repeat(string(pad), v),
+ pad,
+ center)
+
+ }
+
+ fmt.Fprint(t.out, t.newLine)
+}
+
+// Print caption text
+func (t Table) printCaption() {
+ width := t.getTableWidth()
+ paragraph, _ := WrapString(t.captionText, width)
+ for linecount := 0; linecount < len(paragraph); linecount++ {
+ fmt.Fprintln(t.out, paragraph[linecount])
+ }
+}
+
+// Calculate the total number of characters in a row
+func (t Table) getTableWidth() int {
+ var chars int
+ for _, v := range t.cs {
+ chars += v
+ }
+
+ // Add chars, spaces, separators to calculate the total width of the table.
+ // ncols := t.colSize
+ // spaces := ncols * 2
+ // seps := ncols + 1
+
+ return (chars + (3 * t.colSize) + 2)
+}
+
+func (t Table) printRows() {
+ for i, lines := range t.lines {
+ t.printRow(lines, i)
+ }
+}
+
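+// fillAlignment ensures columnsAlign has an entry for each of the num columns,
+// defaulting every column to the table-wide alignment.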
+func (t *Table) fillAlignment(num int) {
+ if len(t.columnsAlign) < num {
+ t.columnsAlign = make([]int, num)
+ for i := range t.columnsAlign {
+ t.columnsAlign[i] = t.align
+ }
+ }
+}
+
+// Print Row Information
+// Adjust column alignment based on type
+
+func (t *Table) printRow(columns [][]string, rowIdx int) {
+ // Get Maximum Height
+ max := t.rs[rowIdx]
+ total := len(columns)
+
+ // TODO Fix uneven col size
+ // if total < t.colSize {
+ // for n := t.colSize - total; n < t.colSize ; n++ {
+ // columns = append(columns, []string{SPACE})
+ // t.cs[n] = t.mW
+ // }
+ //}
+
+ // Pad Each Height
+ pads := []int{}
+
+ // Checking for ANSI escape sequences for columns
+ is_esc_seq := false
+ if len(t.columnsParams) > 0 {
+ is_esc_seq = true
+ }
+ t.fillAlignment(total)
+
+ for i, line := range columns {
+ length := len(line)
+ pad := max - length
+ pads = append(pads, pad)
+ for n := 0; n < pad; n++ {
+ columns[i] = append(columns[i], " ")
+ }
+ }
+ //fmt.Println(max, "\n")
+ for x := 0; x < max; x++ {
+ for y := 0; y < total; y++ {
+
+ // Check if border is set
+ if !t.noWhiteSpace {
+ fmt.Fprint(t.out, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn))
+ fmt.Fprintf(t.out, SPACE)
+ }
+
+ str := columns[y][x]
+
+ // Embedding escape sequence with column value
+ if is_esc_seq {
+ str = format(str, t.columnsParams[y])
+ }
+
+ // This would print alignment
+ // Default alignment would use multiple configuration
+ switch t.columnsAlign[y] {
+ case ALIGN_CENTER: //
+ fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y]))
+ case ALIGN_RIGHT:
+ fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y]))
+ case ALIGN_LEFT:
+ fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+ default:
+ if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) {
+ fmt.Fprintf(t.out, "%s", PadLeft(str, SPACE, t.cs[y]))
+ } else {
+ fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+
+ // TODO Custom alignment per column
+ //if max == 1 || pads[y] > 0 {
+ // fmt.Fprintf(t.out, "%s", Pad(str, SPACE, t.cs[y]))
+ //} else {
+ // fmt.Fprintf(t.out, "%s", PadRight(str, SPACE, t.cs[y]))
+ //}
+
+ }
+ }
+ if !t.noWhiteSpace {
+ fmt.Fprintf(t.out, SPACE)
+ } else {
+ fmt.Fprintf(t.out, t.tablePadding)
+ }
+ }
+ // Check if border is set
+ // Replace with space if not set
+ if !t.noWhiteSpace {
+ fmt.Fprint(t.out, ConditionString(t.borders.Left, t.pColumn, SPACE))
+ }
+ fmt.Fprint(t.out, t.newLine)
+ }
+
+ if t.rowLine {
+ t.printLine(true)
+ }
+}
+
+// Print the rows of the table and merge the cells that are identical
+func (t *Table) printRowsMergeCells() {
+ var previousLine []string
+ var displayCellBorder []bool
+ var tmpWriter bytes.Buffer
+ for i, lines := range t.lines {
+ // We store the display of the current line in a tmp writer, as we need to know which border needs to be printed above
+ previousLine, displayCellBorder = t.printRowMergeCells(&tmpWriter, lines, i, previousLine)
+ if i > 0 { //We don't need to print borders above first line
+ if t.rowLine {
+ t.printLineOptionalCellSeparators(true, displayCellBorder)
+ }
+ }
+ tmpWriter.WriteTo(t.out)
+ }
+ //Print the end of the table
+ if t.rowLine {
+ t.printLine(true)
+ }
+}
+
+// Print Row Information to a writer and merge identical cells.
+// Adjust column alignment based on type
+
+func (t *Table) printRowMergeCells(writer io.Writer, columns [][]string, rowIdx int, previousLine []string) ([]string, []bool) {
+ // Get Maximum Height
+ max := t.rs[rowIdx]
+ total := len(columns)
+
+ // Pad Each Height
+ pads := []int{}
+
+ // Checking for ANSI escape sequences for columns
+ is_esc_seq := false
+ if len(t.columnsParams) > 0 {
+ is_esc_seq = true
+ }
+ for i, line := range columns {
+ length := len(line)
+ pad := max - length
+ pads = append(pads, pad)
+ for n := 0; n < pad; n++ {
+ columns[i] = append(columns[i], " ")
+ }
+ }
+
+ var displayCellBorder []bool
+ t.fillAlignment(total)
+ for x := 0; x < max; x++ {
+ for y := 0; y < total; y++ {
+
+ // Check if border is set
+ fmt.Fprint(writer, ConditionString((!t.borders.Left && y == 0), SPACE, t.pColumn))
+
+ fmt.Fprintf(writer, SPACE)
+
+ str := columns[y][x]
+
+ // Embedding escape sequence with column value
+ if is_esc_seq {
+ str = format(str, t.columnsParams[y])
+ }
+
+ if t.autoMergeCells {
+ // Store the full line to merge multi-line cells
+ fullLine := strings.TrimRight(strings.Join(columns[y], " "), " ")
+ if len(previousLine) > y && fullLine == previousLine[y] && fullLine != "" {
+ // If this cell is identical to the one above but not empty, we don't display the border and keep the cell empty.
+ displayCellBorder = append(displayCellBorder, false)
+ str = ""
+ } else {
+ // First line or different content, keep the content and print the cell border
+ displayCellBorder = append(displayCellBorder, true)
+ }
+ }
+
+ // This would print alignment
+ // Default alignment would use multiple configuration
+ switch t.columnsAlign[y] {
+ case ALIGN_CENTER: //
+ fmt.Fprintf(writer, "%s", Pad(str, SPACE, t.cs[y]))
+ case ALIGN_RIGHT:
+ fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y]))
+ case ALIGN_LEFT:
+ fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y]))
+ default:
+ if decimal.MatchString(strings.TrimSpace(str)) || percent.MatchString(strings.TrimSpace(str)) {
+ fmt.Fprintf(writer, "%s", PadLeft(str, SPACE, t.cs[y]))
+ } else {
+ fmt.Fprintf(writer, "%s", PadRight(str, SPACE, t.cs[y]))
+ }
+ }
+ fmt.Fprintf(writer, SPACE)
+ }
+ // Check if border is set
+ // Replace with space if not set
+ fmt.Fprint(writer, ConditionString(t.borders.Left, t.pColumn, SPACE))
+ fmt.Fprint(writer, t.newLine)
+ }
+
+ //The new previous line is the current one
+ previousLine = make([]string, total)
+ for y := 0; y < total; y++ {
+ previousLine[y] = strings.TrimRight(strings.Join(columns[y], " "), " ") //Store the full line for multi-lines cells
+ }
+ // Returns the newly added line and whether or not a border should be displayed above.
+ return previousLine, displayCellBorder
+}
+
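+// parseDimension splits a raw cell value into display lines (wrapping them when
+// autoWrap is enabled), records the column width in t.cs and the row height in
+// t.rs, and returns the resulting lines.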
+func (t *Table) parseDimension(str string, colKey, rowKey int) []string {
+ var (
+ raw []string
+ maxWidth int
+ )
+
+ raw = getLines(str)
+ maxWidth = 0
+ for _, line := range raw {
+ if w := DisplayWidth(line); w > maxWidth {
+ maxWidth = w
+ }
+ }
+
+ // If wrapping, ensure that all paragraphs in the cell fit in the
+ // specified width.
+ if t.autoWrap {
+ // If there's a maximum allowed width for wrapping, use that.
+ if maxWidth > t.mW {
+ maxWidth = t.mW
+ }
+
+ // In the process of doing so, we need to recompute maxWidth. This
+ // is because perhaps a word in the cell is longer than the
+ // allowed maximum width in t.mW.
+ newMaxWidth := maxWidth
+ newRaw := make([]string, 0, len(raw))
+
+ if t.reflowText {
+ // Make a single paragraph of everything.
+ raw = []string{strings.Join(raw, " ")}
+ }
+ for i, para := range raw {
+ paraLines, _ := WrapString(para, maxWidth)
+ for _, line := range paraLines {
+ if w := DisplayWidth(line); w > newMaxWidth {
+ newMaxWidth = w
+ }
+ }
+ if i > 0 {
+ newRaw = append(newRaw, " ")
+ }
+ newRaw = append(newRaw, paraLines...)
+ }
+ raw = newRaw
+ maxWidth = newMaxWidth
+ }
+
+ // Store the new known maximum width.
+ v, ok := t.cs[colKey]
+ if !ok || v < maxWidth || v == 0 {
+ t.cs[colKey] = maxWidth
+ }
+
+ // Remember the number of lines for the row printer.
+ h := len(raw)
+ v, ok = t.rs[rowKey]
+
+ if !ok || v < h || v == 0 {
+ t.rs[rowKey] = h
+ }
+ //fmt.Printf("Raw %+v %d\n", raw, len(raw))
+ return raw
+}
diff --git a/vendor/github.com/olekukonko/tablewriter/table_with_color.go b/vendor/github.com/olekukonko/tablewriter/table_with_color.go
new file mode 100644
index 0000000000..ae7a364aed
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/table_with_color.go
@@ -0,0 +1,136 @@
+package tablewriter
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+const ESC = "\033"
+const SEP = ";"
+
+const (
+ BgBlackColor int = iota + 40
+ BgRedColor
+ BgGreenColor
+ BgYellowColor
+ BgBlueColor
+ BgMagentaColor
+ BgCyanColor
+ BgWhiteColor
+)
+
+const (
+ FgBlackColor int = iota + 30
+ FgRedColor
+ FgGreenColor
+ FgYellowColor
+ FgBlueColor
+ FgMagentaColor
+ FgCyanColor
+ FgWhiteColor
+)
+
+const (
+ BgHiBlackColor int = iota + 100
+ BgHiRedColor
+ BgHiGreenColor
+ BgHiYellowColor
+ BgHiBlueColor
+ BgHiMagentaColor
+ BgHiCyanColor
+ BgHiWhiteColor
+)
+
+const (
+ FgHiBlackColor int = iota + 90
+ FgHiRedColor
+ FgHiGreenColor
+ FgHiYellowColor
+ FgHiBlueColor
+ FgHiMagentaColor
+ FgHiCyanColor
+ FgHiWhiteColor
+)
+
+const (
+ Normal = 0
+ Bold = 1
+ UnderlineSingle = 4
+ Italic = 3
+)
+
+type Colors []int
+
+func startFormat(seq string) string {
+ return fmt.Sprintf("%s[%sm", ESC, seq)
+}
+
+func stopFormat() string {
+ return fmt.Sprintf("%s[%dm", ESC, Normal)
+}
+
+// Making the SGR (Select Graphic Rendition) sequence.
+func makeSequence(codes []int) string {
+ codesInString := []string{}
+ for _, code := range codes {
+ codesInString = append(codesInString, strconv.Itoa(code))
+ }
+ return strings.Join(codesInString, SEP)
+}
+
+// Adding ANSI escape sequences before and after string
+func format(s string, codes interface{}) string {
+ var seq string
+
+ switch v := codes.(type) {
+
+ case string:
+ seq = v
+ case []int:
+ seq = makeSequence(v)
+ case Colors:
+ seq = makeSequence(v)
+ default:
+ return s
+ }
+
+ if len(seq) == 0 {
+ return s
+ }
+ return startFormat(seq) + s + stopFormat()
+}
+
+// Adding header colors (ANSI codes)
+func (t *Table) SetHeaderColor(colors ...Colors) {
+ if t.colSize != len(colors) {
+ panic("Number of header colors must be equal to number of headers.")
+ }
+ for i := 0; i < len(colors); i++ {
+ t.headerParams = append(t.headerParams, makeSequence(colors[i]))
+ }
+}
+
+// Adding column colors (ANSI codes)
+func (t *Table) SetColumnColor(colors ...Colors) {
+ if t.colSize != len(colors) {
+ panic("Number of column colors must be equal to number of headers.")
+ }
+ for i := 0; i < len(colors); i++ {
+ t.columnsParams = append(t.columnsParams, makeSequence(colors[i]))
+ }
+}
+
+// Adding footer colors (ANSI codes)
+func (t *Table) SetFooterColor(colors ...Colors) {
+ if len(t.footers) != len(colors) {
+ panic("Number of footer colors must be equal to number of footers.")
+ }
+ for i := 0; i < len(colors); i++ {
+ t.footerParams = append(t.footerParams, makeSequence(colors[i]))
+ }
+}
+
+func Color(colors ...int) []int {
+ return colors
+}
diff --git a/vendor/github.com/olekukonko/tablewriter/util.go b/vendor/github.com/olekukonko/tablewriter/util.go
new file mode 100644
index 0000000000..380e7ab35b
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/util.go
@@ -0,0 +1,93 @@
+// Copyright 2014 Oleku Konko All rights reserved.
+// Use of this source code is governed by a MIT
+// license that can be found in the LICENSE file.
+
+// This module is a Table Writer API for the Go Programming Language.
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+package tablewriter
+
+import (
+ "math"
+ "regexp"
+ "strings"
+
+ "github.com/mattn/go-runewidth"
+)
+
+var ansi = regexp.MustCompile("\033\\[(?:[0-9]{1,3}(?:;[0-9]{1,3})*)?[m|K]")
+
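+// DisplayWidth returns the on-screen width of str, ignoring any ANSI escape sequences.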
+func DisplayWidth(str string) int {
+ return runewidth.StringWidth(ansi.ReplaceAllLiteralString(str, ""))
+}
+
+// Simple Condition for string
+// Returns value based on condition
+func ConditionString(cond bool, valid, inValid string) string {
+ if cond {
+ return valid
+ }
+ return inValid
+}
+
+func isNumOrSpace(r rune) bool {
+ return ('0' <= r && r <= '9') || r == ' '
+}
+
+// Format Table Header
+// Replace _ , . and spaces
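+// e.g. Title("user_id") yields "USER ID", while a number such as "0.0" keeps its dot.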
+func Title(name string) string {
+ origLen := len(name)
+ rs := []rune(name)
+ for i, r := range rs {
+ switch r {
+ case '_':
+ rs[i] = ' '
+ case '.':
+ // ignore floating number 0.0
+ if (i != 0 && !isNumOrSpace(rs[i-1])) || (i != len(rs)-1 && !isNumOrSpace(rs[i+1])) {
+ rs[i] = ' '
+ }
+ }
+ }
+ name = string(rs)
+ name = strings.TrimSpace(name)
+ if len(name) == 0 && origLen > 0 {
+ // Keep at least one character. This is important to preserve
+ // empty lines in multi-line headers/footers.
+ name = " "
+ }
+ return strings.ToUpper(name)
+}
+
+// Pad String
+// Attempts to place string in the center
+func Pad(s, pad string, width int) string {
+ gap := width - DisplayWidth(s)
+ if gap > 0 {
+ gapLeft := int(math.Ceil(float64(gap / 2)))
+ gapRight := gap - gapLeft
+ return strings.Repeat(string(pad), gapLeft) + s + strings.Repeat(string(pad), gapRight)
+ }
+ return s
+}
+
+// Pad String Right position
+// This would place string at the left side of the screen
+func PadRight(s, pad string, width int) string {
+ gap := width - DisplayWidth(s)
+ if gap > 0 {
+ return s + strings.Repeat(string(pad), gap)
+ }
+ return s
+}
+
+// Pad String Left position
+// This would place string at the right side of the screen
+func PadLeft(s, pad string, width int) string {
+ gap := width - DisplayWidth(s)
+ if gap > 0 {
+ return strings.Repeat(string(pad), gap) + s
+ }
+ return s
+}
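
A short sketch of the helpers added in util.go above; Title, Pad, PadRight and PadLeft are exactly the functions from this hunk, and the expected outputs in the comments follow from that code.

package main

import (
	"fmt"

	"github.com/olekukonko/tablewriter"
)

func main() {
	// Title upper-cases and swaps '_' and '.' for spaces, keeping dots in numbers.
	fmt.Println(tablewriter.Title("user_name")) // USER NAME
	fmt.Println(tablewriter.Title("v1.2"))      // V1.2

	// Pad centers, PadRight left-aligns, PadLeft right-aligns within the width.
	fmt.Printf("[%s]\n", tablewriter.Pad("go", " ", 6))      // [  go  ]
	fmt.Printf("[%s]\n", tablewriter.PadRight("go", " ", 6)) // [go    ]
	fmt.Printf("[%s]\n", tablewriter.PadLeft("go", " ", 6))  // [    go]
}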
diff --git a/vendor/github.com/olekukonko/tablewriter/wrap.go b/vendor/github.com/olekukonko/tablewriter/wrap.go
new file mode 100644
index 0000000000..a092ee1f75
--- /dev/null
+++ b/vendor/github.com/olekukonko/tablewriter/wrap.go
@@ -0,0 +1,99 @@
+// Copyright 2014 Oleku Konko All rights reserved.
+// Use of this source code is governed by a MIT
+// license that can be found in the LICENSE file.
+
+// This module is a Table Writer API for the Go Programming Language.
+// The protocols were written in pure Go and work on Windows and Unix systems.
+
+package tablewriter
+
+import (
+ "math"
+ "strings"
+
+ "github.com/mattn/go-runewidth"
+)
+
+var (
+ nl = "\n"
+ sp = " "
+)
+
+const defaultPenalty = 1e5
+
+// WrapString wraps s into a paragraph of lines of length lim, with minimal
+// raggedness, returning the lines and the (possibly widened) limit.
+func WrapString(s string, lim int) ([]string, int) {
+ words := strings.Split(strings.Replace(s, nl, sp, -1), sp)
+ var lines []string
+ max := 0
+ for _, v := range words {
+ max = runewidth.StringWidth(v)
+ if max > lim {
+ lim = max
+ }
+ }
+ for _, line := range WrapWords(words, 1, lim, defaultPenalty) {
+ lines = append(lines, strings.Join(line, sp))
+ }
+ return lines, lim
+}
+
+// WrapWords is the low-level line-breaking algorithm, useful if you need more
+// control over the details of the text wrapping process. For most uses,
+// WrapString will be sufficient and more convenient.
+//
+// WrapWords splits a list of words into lines with minimal "raggedness",
+// treating each rune as one unit, accounting for spc units between adjacent
+// words on each line, and attempting to limit lines to lim units. Raggedness
+// is the total error over all lines, where error is the square of the
+// difference of the length of the line and lim. Too-long lines (which only
+// happen when a single word is longer than lim units) have pen penalty units
+// added to the error.
+func WrapWords(words []string, spc, lim, pen int) [][]string {
+ n := len(words)
+
+ length := make([][]int, n)
+ for i := 0; i < n; i++ {
+ length[i] = make([]int, n)
+ length[i][i] = runewidth.StringWidth(words[i])
+ for j := i + 1; j < n; j++ {
+ length[i][j] = length[i][j-1] + spc + runewidth.StringWidth(words[j])
+ }
+ }
+ nbrk := make([]int, n)
+ cost := make([]int, n)
+ for i := range cost {
+ cost[i] = math.MaxInt32
+ }
+ for i := n - 1; i >= 0; i-- {
+ if length[i][n-1] <= lim {
+ cost[i] = 0
+ nbrk[i] = n
+ } else {
+ for j := i + 1; j < n; j++ {
+ d := lim - length[i][j-1]
+ c := d*d + cost[j]
+ if length[i][j-1] > lim {
+ c += pen // too-long lines get a worse penalty
+ }
+ if c < cost[i] {
+ cost[i] = c
+ nbrk[i] = j
+ }
+ }
+ }
+ }
+ var lines [][]string
+ i := 0
+ for i < n {
+ lines = append(lines, words[i:nbrk[i]])
+ i = nbrk[i]
+ }
+ return lines
+}
+
+// getLines decomposes a multiline string into a slice of strings.
+func getLines(s string) []string {
+ return strings.Split(s, nl)
+}
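
A small usage sketch for the wrapping entry point above. WrapString comes from this hunk; the exact line breaks are left unstated since they depend on the raggedness minimisation, but the returned limit stays at 10 because no single word is wider than that.

package main

import (
	"fmt"

	"github.com/olekukonko/tablewriter"
)

func main() {
	// WrapString is the function added above; 10 is the requested line limit.
	lines, limit := tablewriter.WrapString("the quick brown fox jumps over the lazy dog", 10)

	fmt.Println(limit) // 10: no single word is wider than the limit, so it is unchanged
	for _, line := range lines {
		fmt.Println(line)
	}
}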
diff --git a/vendor/github.com/pkg/errors/.travis.yml b/vendor/github.com/pkg/errors/.travis.yml
index d4b92663ba..9159de03e0 100644
--- a/vendor/github.com/pkg/errors/.travis.yml
+++ b/vendor/github.com/pkg/errors/.travis.yml
@@ -1,15 +1,10 @@
language: go
go_import_path: github.com/pkg/errors
go:
- - 1.4.x
- - 1.5.x
- - 1.6.x
- - 1.7.x
- - 1.8.x
- - 1.9.x
- - 1.10.x
- 1.11.x
+ - 1.12.x
+ - 1.13.x
- tip
script:
- - go test -v ./...
+ - make check
diff --git a/vendor/github.com/pkg/errors/Makefile b/vendor/github.com/pkg/errors/Makefile
new file mode 100644
index 0000000000..ce9d7cded6
--- /dev/null
+++ b/vendor/github.com/pkg/errors/Makefile
@@ -0,0 +1,44 @@
+PKGS := github.com/pkg/errors
+SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS))
+GO := go
+
+check: test vet gofmt misspell unconvert staticcheck ineffassign unparam
+
+test:
+ $(GO) test $(PKGS)
+
+vet: | test
+ $(GO) vet $(PKGS)
+
+staticcheck:
+ $(GO) get honnef.co/go/tools/cmd/staticcheck
+ staticcheck -checks all $(PKGS)
+
+misspell:
+ $(GO) get github.com/client9/misspell/cmd/misspell
+ misspell \
+ -locale GB \
+ -error \
+ *.md *.go
+
+unconvert:
+ $(GO) get github.com/mdempsky/unconvert
+ unconvert -v $(PKGS)
+
+ineffassign:
+ $(GO) get github.com/gordonklaus/ineffassign
+ find $(SRCDIRS) -name '*.go' | xargs ineffassign
+
+pedantic: check errcheck
+
+unparam:
+ $(GO) get mvdan.cc/unparam
+ unparam ./...
+
+errcheck:
+ $(GO) get github.com/kisielk/errcheck
+ errcheck $(PKGS)
+
+gofmt:
+ @echo Checking code is gofmted
+ @test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)"
diff --git a/vendor/github.com/pkg/errors/README.md b/vendor/github.com/pkg/errors/README.md
index 6483ba2afb..54dfdcb12e 100644
--- a/vendor/github.com/pkg/errors/README.md
+++ b/vendor/github.com/pkg/errors/README.md
@@ -41,11 +41,18 @@ default:
[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors).
+## Roadmap
+
+With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows:
+
+- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible)
+- 1.0. Final release.
+
## Contributing
-We welcome pull requests, bug fixes and issue reports. With that said, the bar for adding new symbols to this package is intentionally set high.
+Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports.
-Before proposing a change, please discuss your change by raising an issue.
+Before sending a PR, please discuss your change by raising an issue.
## License
diff --git a/vendor/github.com/pkg/errors/errors.go b/vendor/github.com/pkg/errors/errors.go
index 7421f326ff..161aea2582 100644
--- a/vendor/github.com/pkg/errors/errors.go
+++ b/vendor/github.com/pkg/errors/errors.go
@@ -82,7 +82,7 @@
//
// if err, ok := err.(stackTracer); ok {
// for _, f := range err.StackTrace() {
-// fmt.Printf("%+s:%d", f)
+// fmt.Printf("%+s:%d\n", f, f)
// }
// }
//
@@ -159,6 +159,9 @@ type withStack struct {
func (w *withStack) Cause() error { return w.error }
+// Unwrap provides compatibility for Go 1.13 error chains.
+func (w *withStack) Unwrap() error { return w.error }
+
func (w *withStack) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
@@ -241,6 +244,9 @@ type withMessage struct {
func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() }
func (w *withMessage) Cause() error { return w.cause }
+// Unwrap provides compatibility for Go 1.13 error chains.
+func (w *withMessage) Unwrap() error { return w.cause }
+
func (w *withMessage) Format(s fmt.State, verb rune) {
switch verb {
case 'v':
diff --git a/vendor/github.com/pkg/errors/go113.go b/vendor/github.com/pkg/errors/go113.go
new file mode 100644
index 0000000000..be0d10d0c7
--- /dev/null
+++ b/vendor/github.com/pkg/errors/go113.go
@@ -0,0 +1,38 @@
+// +build go1.13
+
+package errors
+
+import (
+ stderrors "errors"
+)
+
+// Is reports whether any error in err's chain matches target.
+//
+// The chain consists of err itself followed by the sequence of errors obtained by
+// repeatedly calling Unwrap.
+//
+// An error is considered to match a target if it is equal to that target or if
+// it implements a method Is(error) bool such that Is(target) returns true.
+func Is(err, target error) bool { return stderrors.Is(err, target) }
+
+// As finds the first error in err's chain that matches target, and if so, sets
+// target to that error value and returns true.
+//
+// The chain consists of err itself followed by the sequence of errors obtained by
+// repeatedly calling Unwrap.
+//
+// An error matches target if the error's concrete value is assignable to the value
+// pointed to by target, or if the error has a method As(interface{}) bool such that
+// As(target) returns true. In the latter case, the As method is responsible for
+// setting target.
+//
+// As will panic if target is not a non-nil pointer to either a type that implements
+// error, or to any interface type. As returns false if err is nil.
+func As(err error, target interface{}) bool { return stderrors.As(err, target) }
+
+// Unwrap returns the result of calling the Unwrap method on err, if err's
+// type contains an Unwrap method returning error.
+// Otherwise, Unwrap returns nil.
+func Unwrap(err error) error {
+ return stderrors.Unwrap(err)
+}
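
To illustrate what the new Unwrap methods and the Go 1.13 forwarders buy on Go 1.13+, a hedged sketch follows. errors.Wrap and errors.Cause are existing pkg/errors APIs that are not part of this hunk; Is and Unwrap are the forwarders defined above.

package main

import (
	"fmt"
	"io"

	"github.com/pkg/errors"
)

func main() {
	// errors.Wrap and errors.Cause are existing pkg/errors APIs (not in this hunk).
	err := errors.Wrap(io.EOF, "read config")

	// The new Unwrap methods let the Go 1.13 machinery walk the chain,
	// so the forwarded Is sees through Wrap to the underlying cause.
	fmt.Println(errors.Is(err, io.EOF))      // true
	fmt.Println(errors.Cause(err) == io.EOF) // true
	fmt.Println(errors.Unwrap(err))          // read config: EOF (the message layer)
}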
diff --git a/vendor/github.com/pkg/errors/stack.go b/vendor/github.com/pkg/errors/stack.go
index 2874a048cf..779a8348fb 100644
--- a/vendor/github.com/pkg/errors/stack.go
+++ b/vendor/github.com/pkg/errors/stack.go
@@ -5,10 +5,13 @@ import (
"io"
"path"
"runtime"
+ "strconv"
"strings"
)
// Frame represents a program counter inside a stack frame.
+// For historical reasons if Frame is interpreted as a uintptr
+// its value represents the program counter + 1.
type Frame uintptr
// pc returns the program counter for this frame;
@@ -37,6 +40,15 @@ func (f Frame) line() int {
return line
}
+// name returns the name of this function, if known.
+func (f Frame) name() string {
+ fn := runtime.FuncForPC(f.pc())
+ if fn == nil {
+ return "unknown"
+ }
+ return fn.Name()
+}
+
// Format formats the frame according to the fmt.Formatter interface.
//
// %s source file
@@ -54,22 +66,16 @@ func (f Frame) Format(s fmt.State, verb rune) {
case 's':
switch {
case s.Flag('+'):
- pc := f.pc()
- fn := runtime.FuncForPC(pc)
- if fn == nil {
- io.WriteString(s, "unknown")
- } else {
- file, _ := fn.FileLine(pc)
- fmt.Fprintf(s, "%s\n\t%s", fn.Name(), file)
- }
+ io.WriteString(s, f.name())
+ io.WriteString(s, "\n\t")
+ io.WriteString(s, f.file())
default:
io.WriteString(s, path.Base(f.file()))
}
case 'd':
- fmt.Fprintf(s, "%d", f.line())
+ io.WriteString(s, strconv.Itoa(f.line()))
case 'n':
- name := runtime.FuncForPC(f.pc()).Name()
- io.WriteString(s, funcname(name))
+ io.WriteString(s, funcname(f.name()))
case 'v':
f.Format(s, 's')
io.WriteString(s, ":")
@@ -77,6 +83,16 @@ func (f Frame) Format(s fmt.State, verb rune) {
}
}
+// MarshalText formats a stacktrace Frame as a text string. The output is the
+// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs.
+func (f Frame) MarshalText() ([]byte, error) {
+ name := f.name()
+ if name == "unknown" {
+ return []byte(name), nil
+ }
+ return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil
+}
+
// StackTrace is stack of Frames from innermost (newest) to outermost (oldest).
type StackTrace []Frame
@@ -94,16 +110,30 @@ func (st StackTrace) Format(s fmt.State, verb rune) {
switch {
case s.Flag('+'):
for _, f := range st {
- fmt.Fprintf(s, "\n%+v", f)
+ io.WriteString(s, "\n")
+ f.Format(s, verb)
}
case s.Flag('#'):
fmt.Fprintf(s, "%#v", []Frame(st))
default:
- fmt.Fprintf(s, "%v", []Frame(st))
+ st.formatSlice(s, verb)
}
case 's':
- fmt.Fprintf(s, "%s", []Frame(st))
+ st.formatSlice(s, verb)
+ }
+}
+
+// formatSlice will format this StackTrace into the given buffer as a slice of
+// Frame, only valid when called with '%s' or '%v'.
+func (st StackTrace) formatSlice(s fmt.State, verb rune) {
+ io.WriteString(s, "[")
+ for i, f := range st {
+ if i > 0 {
+ io.WriteString(s, " ")
+ }
+ f.Format(s, verb)
}
+ io.WriteString(s, "]")
}
// stack represents a stack of program counters.
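
A sketch of the reworked Frame formatting and the new MarshalText. It assumes the stackTracer-style interface from the package documentation and that errors.New attaches a stack, both of which are pkg/errors behaviour not shown in this hunk.

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

func main() {
	err := errors.New("boom") // errors.New records a stack (assumed pkg/errors behaviour)

	// The stackTracer-style assertion mirrors the interface shown in the package docs.
	if st, ok := err.(interface{ StackTrace() errors.StackTrace }); ok {
		f := st.StackTrace()[0] // innermost frame: this call site

		fmt.Printf("%s\n", f)  // base name of the source file
		fmt.Printf("%d\n", f)  // line number
		fmt.Printf("%+v\n", f) // function name, full file path and line

		if b, err := f.MarshalText(); err == nil {
			fmt.Println(string(b)) // "<function> <file>:<line>" on a single line
		}
	}
}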
diff --git a/vendor/golang.org/x/mod/LICENSE b/vendor/golang.org/x/mod/LICENSE
new file mode 100644
index 0000000000..6a66aea5ea
--- /dev/null
+++ b/vendor/golang.org/x/mod/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2009 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/mod/PATENTS b/vendor/golang.org/x/mod/PATENTS
new file mode 100644
index 0000000000..733099041f
--- /dev/null
+++ b/vendor/golang.org/x/mod/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/tools/internal/module/module.go b/vendor/golang.org/x/mod/module/module.go
index 9a4edb9dec..21f123957d 100644
--- a/vendor/golang.org/x/tools/internal/module/module.go
+++ b/vendor/golang.org/x/mod/module/module.go
@@ -2,8 +2,86 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-// Package module defines the module.Version type
-// along with support code.
+// Package module defines the module.Version type along with support code.
+//
+// The module.Version type is a simple Path, Version pair:
+//
+// type Version struct {
+// Path string
+// Version string
+// }
+//
+// There are no restrictions imposed directly by use of this structure,
+// but additional checking functions, most notably Check, verify that
+// a particular path, version pair is valid.
+//
+// Escaped Paths
+//
+// Module paths appear as substrings of file system paths
+// (in the download cache) and of web server URLs in the proxy protocol.
+// In general we cannot rely on file systems to be case-sensitive,
+// nor can we rely on web servers, since they read from file systems.
+// That is, we cannot rely on the file system to keep rsc.io/QUOTE
+// and rsc.io/quote separate. Windows and macOS don't.
+// Instead, we must never require two different casings of a file path.
+// Because we want the download cache to match the proxy protocol,
+// and because we want the proxy protocol to be possible to serve
+// from a tree of static files (which might be stored on a case-insensitive
+// file system), the proxy protocol must never require two different casings
+// of a URL path either.
+//
+// One possibility would be to make the escaped form be the lowercase
+// hexadecimal encoding of the actual path bytes. This would avoid ever
+// needing different casings of a file path, but it would be fairly illegible
+// to most programmers when those paths appeared in the file system
+// (including in file paths in compiler errors and stack traces)
+// in web server logs, and so on. Instead, we want a safe escaped form that
+// leaves most paths unaltered.
+//
+// The safe escaped form is to replace every uppercase letter
+// with an exclamation mark followed by the letter's lowercase equivalent.
+//
+// For example,
+//
+// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go.
+// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy
+// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus.
+//
+// Import paths that avoid upper-case letters are left unchanged.
+// Note that because import paths are ASCII-only and avoid various
+// problematic punctuation (like : < and >), the escaped form is also ASCII-only
+// and avoids the same problematic punctuation.
+//
+// Import paths have never allowed exclamation marks, so there is no
+// need to define how to escape a literal !.
+//
+// Unicode Restrictions
+//
+// Today, paths are disallowed from using Unicode.
+//
+// Although paths are currently disallowed from using Unicode,
+// we would like at some point to allow Unicode letters as well, to assume that
+// file systems and URLs are Unicode-safe (storing UTF-8), and apply
+// the !-for-uppercase convention for escaping them in the file system.
+// But there are at least two subtle considerations.
+//
+// First, note that not all case-fold equivalent distinct runes
+// form an upper/lower pair.
+// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin)
+// are three distinct runes that case-fold to each other.
+// When we do add Unicode letters, we must not assume that upper/lower
+// are the only case-equivalent pairs.
+// Perhaps the Kelvin symbol would be disallowed entirely, for example.
+// Or perhaps it would escape as "!!k", or perhaps as "(212A)".
+//
+// Second, it would be nice to allow Unicode marks as well as letters,
+// but marks include combining marks, and then we must deal not
+// only with case folding but also normalization: both U+00E9 ('é')
+// and U+0065 U+0301 ('e' followed by combining acute accent)
+// look the same on the page and are treated by some file systems
+// as the same path. If we do allow Unicode marks in paths, there
+// must be some kind of normalization to allow only one canonical
+// encoding of any character used in an import path.
package module
// IMPORTANT NOTE
@@ -24,22 +102,91 @@ import (
"unicode"
"unicode/utf8"
- "golang.org/x/tools/internal/semver"
+ "golang.org/x/mod/semver"
+ errors "golang.org/x/xerrors"
)
-// A Version is defined by a module path and version pair.
+// A Version (for clients, a module.Version) is defined by a module path and version pair.
+// These are stored in their plain (unescaped) form.
type Version struct {
+ // Path is a module path, like "golang.org/x/text" or "rsc.io/quote/v2".
Path string
// Version is usually a semantic version in canonical form.
- // There are two exceptions to this general rule.
+ // There are three exceptions to this general rule.
// First, the top-level target of a build has no specific version
// and uses Version = "".
// Second, during MVS calculations the version "none" is used
// to represent the decision to take no version of a given module.
+ // Third, filesystem paths found in "replace" directives are
+ // represented by a path with an empty version.
Version string `json:",omitempty"`
}
+// String returns the module version syntax Path@Version.
+func (m Version) String() string {
+ return m.Path + "@" + m.Version
+}
+
+// A ModuleError indicates an error specific to a module.
+type ModuleError struct {
+ Path string
+ Version string
+ Err error
+}
+
+// VersionError returns a ModuleError derived from a Version and error,
+// or err itself if it is already such an error.
+func VersionError(v Version, err error) error {
+ var mErr *ModuleError
+ if errors.As(err, &mErr) && mErr.Path == v.Path && mErr.Version == v.Version {
+ return err
+ }
+ return &ModuleError{
+ Path: v.Path,
+ Version: v.Version,
+ Err: err,
+ }
+}
+
+func (e *ModuleError) Error() string {
+ if v, ok := e.Err.(*InvalidVersionError); ok {
+ return fmt.Sprintf("%s@%s: invalid %s: %v", e.Path, v.Version, v.noun(), v.Err)
+ }
+ if e.Version != "" {
+ return fmt.Sprintf("%s@%s: %v", e.Path, e.Version, e.Err)
+ }
+ return fmt.Sprintf("module %s: %v", e.Path, e.Err)
+}
+
+func (e *ModuleError) Unwrap() error { return e.Err }
+
+// An InvalidVersionError indicates an error specific to a version, with the
+// module path unknown or specified externally.
+//
+// A ModuleError may wrap an InvalidVersionError, but an InvalidVersionError
+// must not wrap a ModuleError.
+type InvalidVersionError struct {
+ Version string
+ Pseudo bool
+ Err error
+}
+
+// noun returns either "version" or "pseudo-version", depending on whether
+// e.Version is a pseudo-version.
+func (e *InvalidVersionError) noun() string {
+ if e.Pseudo {
+ return "pseudo-version"
+ }
+ return "version"
+}
+
+func (e *InvalidVersionError) Error() string {
+ return fmt.Sprintf("%s %q invalid: %s", e.noun(), e.Version, e.Err)
+}
+
+func (e *InvalidVersionError) Unwrap() error { return e.Err }
+
// Check checks that a given module path, version pair is valid.
// In addition to the path being a valid module path
// and the version being a valid semantic version,
@@ -51,17 +198,14 @@ func Check(path, version string) error {
return err
}
if !semver.IsValid(version) {
- return fmt.Errorf("malformed semantic version %v", version)
+ return &ModuleError{
+ Path: path,
+ Err: &InvalidVersionError{Version: version, Err: errors.New("not a semantic version")},
+ }
}
_, pathMajor, _ := SplitPathVersion(path)
- if !MatchPathMajor(version, pathMajor) {
- if pathMajor == "" {
- pathMajor = "v0 or v1"
- }
- if pathMajor[0] == '.' { // .v1
- pathMajor = pathMajor[1:]
- }
- return fmt.Errorf("mismatched module path %v and version %v (want %v)", path, version, pathMajor)
+ if err := CheckPathMajor(version, pathMajor); err != nil {
+ return &ModuleError{Path: path, Err: err}
}
return nil
}
@@ -79,7 +223,7 @@ func firstPathOK(r rune) bool {
// Paths can be ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~.
// This matches what "go get" has historically recognized in import paths.
// TODO(rsc): We would like to allow Unicode letters, but that requires additional
-// care in the safe encoding (see note below).
+// care in the safe encoding (see "escaped paths" above).
func pathOK(r rune) bool {
if r < utf8.RuneSelf {
return r == '+' || r == '-' || r == '.' || r == '_' || r == '~' ||
@@ -94,7 +238,7 @@ func pathOK(r rune) bool {
// For now we allow all Unicode letters but otherwise limit to pathOK plus a few more punctuation characters.
// If we expand the set of allowed characters here, we have to
// work harder at detecting potential case-folding and normalization collisions.
-// See note about "safe encoding" below.
+// See note about "escaped paths" above.
func fileNameOK(r rune) bool {
if r < utf8.RuneSelf {
// Entire set of ASCII punctuation, from which we remove characters:
@@ -120,6 +264,17 @@ func fileNameOK(r rune) bool {
}
// CheckPath checks that a module path is valid.
+// A valid module path is a valid import path, as checked by CheckImportPath,
+// with two additional constraints.
+// First, the leading path element (up to the first slash, if any),
+// by convention a domain name, must contain only lower-case ASCII letters,
+// ASCII digits, dots (U+002E), and dashes (U+002D);
+// it must contain at least one dot and cannot start with a dash.
+// Second, a final path element of the form /vN, where N looks numeric
+// (ASCII digits and dots), must not begin with a leading zero, must not be /v1,
+// and must not contain any dots. For paths beginning with "gopkg.in/",
+// this second requirement is replaced by a requirement that the path
+// follow the gopkg.in server's conventions.
func CheckPath(path string) error {
if err := checkPath(path, false); err != nil {
return fmt.Errorf("malformed module path %q: %v", path, err)
@@ -149,6 +304,20 @@ func CheckPath(path string) error {
}
// CheckImportPath checks that an import path is valid.
+//
+// A valid import path consists of one or more valid path elements
+// separated by slashes (U+002F). (It must not begin with nor end in a slash.)
+//
+// A valid path element is a non-empty string made up of
+// ASCII letters, ASCII digits, and limited ASCII punctuation: + - . _ and ~.
+// It must not begin or end with a dot (U+002E), nor contain two dots in a row.
+//
+// The element prefix up to the first dot must not be a reserved file name
+// on Windows, regardless of case (CON, com1, NuL, and so on).
+//
+// CheckImportPath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
func CheckImportPath(path string) error {
if err := checkPath(path, false); err != nil {
return fmt.Errorf("malformed import path %q: %v", path, err)
@@ -169,8 +338,8 @@ func checkPath(path string, fileName bool) error {
if path == "" {
return fmt.Errorf("empty string")
}
- if strings.Contains(path, "..") {
- return fmt.Errorf("double dot")
+ if path[0] == '-' {
+ return fmt.Errorf("leading dash")
}
if strings.Contains(path, "//") {
return fmt.Errorf("double slash")
@@ -226,13 +395,24 @@ func checkElem(elem string, fileName bool) error {
}
for _, bad := range badWindowsNames {
if strings.EqualFold(bad, short) {
- return fmt.Errorf("disallowed path element %q", elem)
+ return fmt.Errorf("%q disallowed as path element component on Windows", short)
}
}
return nil
}
-// CheckFilePath checks whether a slash-separated file path is valid.
+// CheckFilePath checks that a slash-separated file path is valid.
+// The definition of a valid file path is the same as the definition
+// of a valid import path except that the set of allowed characters is larger:
+// all Unicode letters, ASCII digits, the ASCII space character (U+0020),
+// and the ASCII punctuation characters
+// “!#$%&()+,-.=@[]^_{}~”.
+// (The excluded punctuation characters, " * < > ? ` ' | / \ and :,
+// have special meanings in certain shells or operating systems.)
+//
+// CheckFilePath may be less restrictive in the future, but see the
+// top-level package documentation for additional information about
+// subtleties of Unicode.
func CheckFilePath(path string) error {
if err := checkPath(path, true); err != nil {
return fmt.Errorf("malformed file path %q: %v", path, err)
@@ -271,6 +451,9 @@ var badWindowsNames = []string{
// and version is either empty or "/vN" for N >= 2.
// As a special case, gopkg.in paths are recognized directly;
// they require ".vN" instead of "/vN", and for all N, not just N >= 2.
+// SplitPathVersion returns with ok = false when presented with
+// a path whose last path element does not satisfy the constraints
+// applied by CheckPath, such as "example.com/pkg/v1" or "example.com/pkg/v1.2".
func SplitPathVersion(path string) (prefix, pathMajor string, ok bool) {
if strings.HasPrefix(path, "gopkg.in/") {
return splitGopkgIn(path)
@@ -319,20 +502,65 @@ func splitGopkgIn(path string) (prefix, pathMajor string, ok bool) {
// MatchPathMajor reports whether the semantic version v
// matches the path major version pathMajor.
+//
+// MatchPathMajor returns true if and only if CheckPathMajor returns nil.
func MatchPathMajor(v, pathMajor string) bool {
+ return CheckPathMajor(v, pathMajor) == nil
+}
+
+// CheckPathMajor returns a non-nil error if the semantic version v
+// does not match the path major version pathMajor.
+func CheckPathMajor(v, pathMajor string) error {
+ // TODO(jayconrod): return errors or panic for invalid inputs. This function
+ // (and others) was covered by integration tests for cmd/go, and surrounding
+ // code protected against invalid inputs like non-canonical versions.
if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
}
if strings.HasPrefix(v, "v0.0.0-") && pathMajor == ".v1" {
// Allow old bug in pseudo-versions that generated v0.0.0- pseudoversion for gopkg .v1.
// For example, gopkg.in/yaml.v2@v2.2.1's go.mod requires gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405.
- return true
+ return nil
}
m := semver.Major(v)
if pathMajor == "" {
- return m == "v0" || m == "v1" || semver.Build(v) == "+incompatible"
+ if m == "v0" || m == "v1" || semver.Build(v) == "+incompatible" {
+ return nil
+ }
+ pathMajor = "v0 or v1"
+ } else if pathMajor[0] == '/' || pathMajor[0] == '.' {
+ if m == pathMajor[1:] {
+ return nil
+ }
+ pathMajor = pathMajor[1:]
+ }
+ return &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("should be %s, not %s", pathMajor, semver.Major(v)),
}
- return (pathMajor[0] == '/' || pathMajor[0] == '.') && m == pathMajor[1:]
+}
+
+// PathMajorPrefix returns the major-version tag prefix implied by pathMajor.
+// An empty PathMajorPrefix allows either v0 or v1.
+//
+// Note that MatchPathMajor may accept some versions that do not actually begin
+// with this prefix: namely, it accepts a 'v0.0.0-' prefix for a '.v1'
+// pathMajor, even though that pathMajor implies 'v1' tagging.
+func PathMajorPrefix(pathMajor string) string {
+ if pathMajor == "" {
+ return ""
+ }
+ if pathMajor[0] != '/' && pathMajor[0] != '.' {
+ panic("pathMajor suffix " + pathMajor + " passed to PathMajorPrefix lacks separator")
+ }
+ if strings.HasPrefix(pathMajor, ".v") && strings.HasSuffix(pathMajor, "-unstable") {
+ pathMajor = strings.TrimSuffix(pathMajor, "-unstable")
+ }
+ m := pathMajor[1:]
+ if m != semver.Major(m) {
+ panic("pathMajor suffix " + pathMajor + "passed to PathMajorPrefix is not a valid major version")
+ }
+ return m
}
// CanonicalVersion returns the canonical form of the version string v.
@@ -345,7 +573,10 @@ func CanonicalVersion(v string) string {
return cv
}
-// Sort sorts the list by Path, breaking ties by comparing Versions.
+// Sort sorts the list by Path, breaking ties by comparing Version fields.
+// The Version fields are interpreted as semantic versions (using semver.Compare)
+// optionally followed by a tie-breaking suffix introduced by a slash character,
+// like in "v0.0.1/go.mod".
func Sort(list []Version) {
sort.Slice(list, func(i, j int) bool {
mi := list[i]
@@ -372,93 +603,36 @@ func Sort(list []Version) {
})
}
-// Safe encodings
-//
-// Module paths appear as substrings of file system paths
-// (in the download cache) and of web server URLs in the proxy protocol.
-// In general we cannot rely on file systems to be case-sensitive,
-// nor can we rely on web servers, since they read from file systems.
-// That is, we cannot rely on the file system to keep rsc.io/QUOTE
-// and rsc.io/quote separate. Windows and macOS don't.
-// Instead, we must never require two different casings of a file path.
-// Because we want the download cache to match the proxy protocol,
-// and because we want the proxy protocol to be possible to serve
-// from a tree of static files (which might be stored on a case-insensitive
-// file system), the proxy protocol must never require two different casings
-// of a URL path either.
-//
-// One possibility would be to make the safe encoding be the lowercase
-// hexadecimal encoding of the actual path bytes. This would avoid ever
-// needing different casings of a file path, but it would be fairly illegible
-// to most programmers when those paths appeared in the file system
-// (including in file paths in compiler errors and stack traces)
-// in web server logs, and so on. Instead, we want a safe encoding that
-// leaves most paths unaltered.
-//
-// The safe encoding is this:
-// replace every uppercase letter with an exclamation mark
-// followed by the letter's lowercase equivalent.
-//
-// For example,
-// github.com/Azure/azure-sdk-for-go -> github.com/!azure/azure-sdk-for-go.
-// github.com/GoogleCloudPlatform/cloudsql-proxy -> github.com/!google!cloud!platform/cloudsql-proxy
-// github.com/Sirupsen/logrus -> github.com/!sirupsen/logrus.
-//
-// Import paths that avoid upper-case letters are left unchanged.
-// Note that because import paths are ASCII-only and avoid various
-// problematic punctuation (like : < and >), the safe encoding is also ASCII-only
-// and avoids the same problematic punctuation.
-//
-// Import paths have never allowed exclamation marks, so there is no
-// need to define how to encode a literal !.
-//
-// Although paths are disallowed from using Unicode (see pathOK above),
-// the eventual plan is to allow Unicode letters as well, to assume that
-// file systems and URLs are Unicode-safe (storing UTF-8), and apply
-// the !-for-uppercase convention. Note however that not all runes that
-// are different but case-fold equivalent are an upper/lower pair.
-// For example, U+004B ('K'), U+006B ('k'), and U+212A ('K' for Kelvin)
-// are considered to case-fold to each other. When we do add Unicode
-// letters, we must not assume that upper/lower are the only case-equivalent pairs.
-// Perhaps the Kelvin symbol would be disallowed entirely, for example.
-// Or perhaps it would encode as "!!k", or perhaps as "(212A)".
-//
-// Also, it would be nice to allow Unicode marks as well as letters,
-// but marks include combining marks, and then we must deal not
-// only with case folding but also normalization: both U+00E9 ('é')
-// and U+0065 U+0301 ('e' followed by combining acute accent)
-// look the same on the page and are treated by some file systems
-// as the same path. If we do allow Unicode marks in paths, there
-// must be some kind of normalization to allow only one canonical
-// encoding of any character used in an import path.
-
-// EncodePath returns the safe encoding of the given module path.
+// EscapePath returns the escaped form of the given module path.
// It fails if the module path is invalid.
-func EncodePath(path string) (encoding string, err error) {
+func EscapePath(path string) (escaped string, err error) {
if err := CheckPath(path); err != nil {
return "", err
}
- return encodeString(path)
+ return escapeString(path)
}
-// EncodeVersion returns the safe encoding of the given module version.
+// EscapeVersion returns the escaped form of the given module version.
// Versions are allowed to be in non-semver form but must be valid file names
// and not contain exclamation marks.
-func EncodeVersion(v string) (encoding string, err error) {
+func EscapeVersion(v string) (escaped string, err error) {
if err := checkElem(v, true); err != nil || strings.Contains(v, "!") {
- return "", fmt.Errorf("disallowed version string %q", v)
+ return "", &InvalidVersionError{
+ Version: v,
+ Err: fmt.Errorf("disallowed version string"),
+ }
}
- return encodeString(v)
+ return escapeString(v)
}
-func encodeString(s string) (encoding string, err error) {
+func escapeString(s string) (escaped string, err error) {
haveUpper := false
for _, r := range s {
if r == '!' || r >= utf8.RuneSelf {
// This should be disallowed by CheckPath, but diagnose anyway.
- // The correctness of the encoding loop below depends on it.
- return "", fmt.Errorf("internal error: inconsistency in EncodePath")
+ // The correctness of the escaping loop below depends on it.
+ return "", fmt.Errorf("internal error: inconsistency in EscapePath")
}
if 'A' <= r && r <= 'Z' {
haveUpper = true
@@ -480,39 +654,39 @@ func encodeString(s string) (encoding string, err error) {
return string(buf), nil
}
-// DecodePath returns the module path of the given safe encoding.
-// It fails if the encoding is invalid or encodes an invalid path.
-func DecodePath(encoding string) (path string, err error) {
- path, ok := decodeString(encoding)
+// UnescapePath returns the module path for the given escaped path.
+// It fails if the escaped path is invalid or describes an invalid path.
+func UnescapePath(escaped string) (path string, err error) {
+ path, ok := unescapeString(escaped)
if !ok {
- return "", fmt.Errorf("invalid module path encoding %q", encoding)
+ return "", fmt.Errorf("invalid escaped module path %q", escaped)
}
if err := CheckPath(path); err != nil {
- return "", fmt.Errorf("invalid module path encoding %q: %v", encoding, err)
+ return "", fmt.Errorf("invalid escaped module path %q: %v", escaped, err)
}
return path, nil
}
-// DecodeVersion returns the version string for the given safe encoding.
-// It fails if the encoding is invalid or encodes an invalid version.
+// UnescapeVersion returns the version string for the given escaped version.
+// It fails if the escaped form is invalid or describes an invalid version.
// Versions are allowed to be in non-semver form but must be valid file names
// and not contain exclamation marks.
-func DecodeVersion(encoding string) (v string, err error) {
- v, ok := decodeString(encoding)
+func UnescapeVersion(escaped string) (v string, err error) {
+ v, ok := unescapeString(escaped)
if !ok {
- return "", fmt.Errorf("invalid version encoding %q", encoding)
+ return "", fmt.Errorf("invalid escaped version %q", escaped)
}
if err := checkElem(v, true); err != nil {
- return "", fmt.Errorf("disallowed version string %q", v)
+ return "", fmt.Errorf("invalid escaped version %q: %v", v, err)
}
return v, nil
}
-func decodeString(encoding string) (string, bool) {
+func unescapeString(escaped string) (string, bool) {
var buf []byte
bang := false
- for _, r := range encoding {
+ for _, r := range escaped {
if r >= utf8.RuneSelf {
return "", false
}
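
The escaping and validation rules documented in the rewritten package comment can be exercised directly with the functions from this file; a brief sketch (the Azure path mirrors the example in the package comment):

package main

import (
	"fmt"

	"golang.org/x/mod/module"
)

func main() {
	// Upper-case letters escape to "!" plus the lower-case letter.
	esc, _ := module.EscapePath("github.com/Azure/azure-sdk-for-go")
	fmt.Println(esc) // github.com/!azure/azure-sdk-for-go

	back, _ := module.UnescapePath(esc)
	fmt.Println(back) // github.com/Azure/azure-sdk-for-go

	// Check accepts a v0/v1 version for a path without a major-version suffix...
	fmt.Println(module.Check("github.com/pkg/errors", "v0.9.1")) // <nil>

	// ...and reports a ModuleError wrapping an InvalidVersionError otherwise.
	fmt.Println(module.Check("github.com/pkg/errors", "v2.0.0"))
}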
diff --git a/vendor/golang.org/x/tools/internal/semver/semver.go b/vendor/golang.org/x/mod/semver/semver.go
index 4af7118e55..2988e3cf9c 100644
--- a/vendor/golang.org/x/tools/internal/semver/semver.go
+++ b/vendor/golang.org/x/mod/semver/semver.go
@@ -107,7 +107,7 @@ func Build(v string) string {
}
// Compare returns an integer comparing two versions according to
-// according to semantic version precedence.
+// semantic version precedence.
// The result will be 0 if v == w, -1 if v < w, or +1 if v > w.
//
// An invalid semantic version string is considered less than a valid one.
@@ -263,7 +263,7 @@ func parseBuild(v string) (t, rest string, ok bool) {
i := 1
start := 1
for i < len(v) {
- if !isIdentChar(v[i]) {
+ if !isIdentChar(v[i]) && v[i] != '.' {
return
}
if v[i] == '.' {
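
A small sketch of the semver behaviour touched by this hunk: with the parseBuild change, dotted build identifiers parse, and Compare orders by semantic-version precedence as the corrected comment says.

package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

func main() {
	// Dotted build identifiers are accepted by parseBuild after this change.
	fmt.Println(semver.IsValid("v1.0.0+build.1")) // true
	fmt.Println(semver.Build("v1.0.0+build.1"))   // +build.1

	// Compare orders by semantic version precedence, so v1.10.0 > v1.2.0.
	fmt.Println(semver.Compare("v1.2.0", "v1.10.0")) // -1
}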
diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
index 3e4b195368..2087ceec9c 100644
--- a/vendor/golang.org/x/tools/go/ast/astutil/imports.go
+++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go
@@ -275,9 +275,10 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
// We deleted an entry but now there may be
// a blank line-sized hole where the import was.
- if line-lastLine > 1 {
+ if line-lastLine > 1 || !gen.Rparen.IsValid() {
// There was a blank line immediately preceding the deleted import,
- // so there's no need to close the hole.
+ // so there's no need to close the hole. The right parenthesis is
+ // invalid after AddImport to an import statement without parenthesis.
// Do nothing.
} else if line != fset.File(gen.Rparen).LineCount() {
// There was no blank line. Close the hole.
diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
index 0f652ea6fb..5db8b30967 100644
--- a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
+++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go
@@ -2,9 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
-package cgo
-
-// This file handles cgo preprocessing of files containing `import "C"`.
+// Package cgo handles cgo preprocessing of files containing `import "C"`.
//
// DESIGN
//
@@ -51,6 +49,8 @@ package cgo
// its handling of function calls, analogous to the treatment of map
// lookups in which y=m[k] and y,ok=m[k] are both legal.
+package cgo
+
import (
"fmt"
"go/ast"
diff --git a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
index 9cf186605f..8dcd8bbb71 100644
--- a/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
+++ b/vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go
@@ -344,7 +344,7 @@ func (p *parser) expectKeyword(keyword string) {
// PackageId = string_lit .
//
-func (p *parser) parsePackageId() string {
+func (p *parser) parsePackageID() string {
id, err := strconv.Unquote(p.expect(scanner.String))
if err != nil {
p.error(err)
@@ -384,7 +384,7 @@ func (p *parser) parseDotIdent() string {
//
func (p *parser) parseQualifiedName() (id, name string) {
p.expect('@')
- id = p.parsePackageId()
+ id = p.parsePackageID()
p.expect('.')
// Per rev f280b8a485fd (10/2/2013), qualified names may be used for anonymous fields.
if p.tok == '?' {
@@ -696,7 +696,7 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
// Complete requires the type's embedded interfaces to be fully defined,
// but we do not define any
- return types.NewInterface(methods, nil).Complete()
+ return newInterface(methods, nil).Complete()
}
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
@@ -785,7 +785,7 @@ func (p *parser) parseType(parent *types.Package) types.Type {
func (p *parser) parseImportDecl() {
p.expectKeyword("import")
name := p.parsePackageName()
- p.getPkg(p.parsePackageId(), name)
+ p.getPkg(p.parsePackageID(), name)
}
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
diff --git a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
index db0c9a7ea6..5ee692d383 100644
--- a/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
+++ b/vendor/golang.org/x/tools/go/internal/packagesdriver/sizes.go
@@ -11,11 +11,10 @@ import (
"encoding/json"
"fmt"
"go/types"
- "log"
- "os"
"os/exec"
"strings"
- "time"
+
+ "golang.org/x/tools/internal/gocommand"
)
var debug = false
@@ -78,97 +77,42 @@ func GetSizes(ctx context.Context, buildFlags, env []string, dir string, usesExp
}
func GetSizesGolist(ctx context.Context, buildFlags, env []string, dir string, usesExportData bool) (types.Sizes, error) {
- args := []string{"list", "-f", "{{context.GOARCH}} {{context.Compiler}}"}
- args = append(args, buildFlags...)
- args = append(args, "--", "unsafe")
- stdout, stderr, err := invokeGo(ctx, env, dir, usesExportData, args...)
+ inv := gocommand.Invocation{
+ Verb: "list",
+ Args: []string{"-f", "{{context.GOARCH}} {{context.Compiler}}", "--", "unsafe"},
+ Env: env,
+ BuildFlags: buildFlags,
+ WorkingDir: dir,
+ }
+ stdout, stderr, friendlyErr, rawErr := inv.RunRaw(ctx)
var goarch, compiler string
- if err != nil {
- if strings.Contains(err.Error(), "cannot find main module") {
+ if rawErr != nil {
+ if strings.Contains(rawErr.Error(), "cannot find main module") {
// User's running outside of a module. All bets are off. Get GOARCH and guess compiler is gc.
// TODO(matloob): Is this a problem in practice?
- envout, _, enverr := invokeGo(ctx, env, dir, usesExportData, "env", "GOARCH")
+ inv := gocommand.Invocation{
+ Verb: "env",
+ Args: []string{"GOARCH"},
+ Env: env,
+ WorkingDir: dir,
+ }
+ envout, enverr := inv.Run(ctx)
if enverr != nil {
- return nil, err
+ return nil, enverr
}
goarch = strings.TrimSpace(envout.String())
compiler = "gc"
} else {
- return nil, err
+ return nil, friendlyErr
}
} else {
fields := strings.Fields(stdout.String())
if len(fields) < 2 {
- return nil, fmt.Errorf("could not parse GOARCH and Go compiler in format \"<GOARCH> <compiler>\" from stdout of go command:\n%s\ndir: %s\nstdout: <<%s>>\nstderr: <<%s>>",
- cmdDebugStr(env, args...), dir, stdout.String(), stderr.String())
+ return nil, fmt.Errorf("could not parse GOARCH and Go compiler in format \"<GOARCH> <compiler>\":\nstdout: <<%s>>\nstderr: <<%s>>",
+ stdout.String(), stderr.String())
}
goarch = fields[0]
compiler = fields[1]
}
return types.SizesFor(compiler, goarch), nil
}
-
-// invokeGo returns the stdout and stderr of a go command invocation.
-func invokeGo(ctx context.Context, env []string, dir string, usesExportData bool, args ...string) (*bytes.Buffer, *bytes.Buffer, error) {
- if debug {
- defer func(start time.Time) { log.Printf("%s for %v", time.Since(start), cmdDebugStr(env, args...)) }(time.Now())
- }
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- cmd := exec.CommandContext(ctx, "go", args...)
- // On darwin the cwd gets resolved to the real path, which breaks anything that
- // expects the working directory to keep the original path, including the
- // go command when dealing with modules.
- // The Go stdlib has a special feature where if the cwd and the PWD are the
- // same node then it trusts the PWD, so by setting it in the env for the child
- // process we fix up all the paths returned by the go command.
- cmd.Env = append(append([]string{}, env...), "PWD="+dir)
- cmd.Dir = dir
- cmd.Stdout = stdout
- cmd.Stderr = stderr
- if err := cmd.Run(); err != nil {
- exitErr, ok := err.(*exec.ExitError)
- if !ok {
- // Catastrophic error:
- // - executable not found
- // - context cancellation
- return nil, nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
- }
-
- // Export mode entails a build.
- // If that build fails, errors appear on stderr
- // (despite the -e flag) and the Export field is blank.
- // Do not fail in that case.
- if !usesExportData {
- return nil, nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
- }
- }
-
- // As of writing, go list -export prints some non-fatal compilation
- // errors to stderr, even with -e set. We would prefer that it put
- // them in the Package.Error JSON (see https://golang.org/issue/26319).
- // In the meantime, there's nowhere good to put them, but they can
- // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
- // is set.
- if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
- fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(env, args...), stderr)
- }
-
- // debugging
- if false {
- fmt.Fprintf(os.Stderr, "%s stdout: <<%s>>\n", cmdDebugStr(env, args...), stdout)
- }
-
- return stdout, stderr, nil
-}
-
-func cmdDebugStr(envlist []string, args ...string) string {
- env := make(map[string]string)
- for _, kv := range envlist {
- split := strings.Split(kv, "=")
- k, v := split[0], split[1]
- env[k] = v
- }
-
- return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["PWD"], args)
-}
diff --git a/vendor/golang.org/x/tools/go/packages/golist.go b/vendor/golang.org/x/tools/go/packages/golist.go
index 9c895b3895..b4a13ef454 100644
--- a/vendor/golang.org/x/tools/go/packages/golist.go
+++ b/vendor/golang.org/x/tools/go/packages/golist.go
@@ -6,26 +6,25 @@ package packages
import (
"bytes"
+ "context"
"encoding/json"
"fmt"
"go/types"
- "io/ioutil"
"log"
"os"
"os/exec"
"path"
"path/filepath"
"reflect"
- "regexp"
+ "sort"
"strconv"
"strings"
"sync"
- "time"
"unicode"
"golang.org/x/tools/go/internal/packagesdriver"
- "golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/semver"
+ "golang.org/x/tools/internal/gocommand"
+ "golang.org/x/tools/internal/packagesinternal"
)
// debug controls verbose logging.
@@ -44,16 +43,21 @@ type responseDeduper struct {
dr *driverResponse
}
-// init fills in r with a driverResponse.
-func (r *responseDeduper) init(dr *driverResponse) {
- r.dr = dr
- r.seenRoots = map[string]bool{}
- r.seenPackages = map[string]*Package{}
+func newDeduper() *responseDeduper {
+ return &responseDeduper{
+ dr: &driverResponse{},
+ seenRoots: map[string]bool{},
+ seenPackages: map[string]*Package{},
+ }
+}
+
+// addAll fills in r with a driverResponse.
+func (r *responseDeduper) addAll(dr *driverResponse) {
for _, pkg := range dr.Packages {
- r.seenPackages[pkg.ID] = pkg
+ r.addPackage(pkg)
}
for _, root := range dr.Roots {
- r.seenRoots[root] = true
+ r.addRoot(root)
}
}
@@ -73,25 +77,47 @@ func (r *responseDeduper) addRoot(id string) {
r.dr.Roots = append(r.dr.Roots, id)
}
-// goInfo contains global information from the go tool.
-type goInfo struct {
- rootDirs map[string]string
- env goEnv
+type golistState struct {
+ cfg *Config
+ ctx context.Context
+
+ envOnce sync.Once
+ goEnvError error
+ goEnv map[string]string
+
+ rootsOnce sync.Once
+ rootDirsError error
+ rootDirs map[string]string
+
+ // vendorDirs caches the (non)existence of vendor directories.
+ vendorDirs map[string]bool
}
-type goEnv struct {
- modulesOn bool
+// getEnv returns Go environment variables. Only specific variables are
+// populated -- computing all of them is slow.
+func (state *golistState) getEnv() (map[string]string, error) {
+ state.envOnce.Do(func() {
+ var b *bytes.Buffer
+ b, state.goEnvError = state.invokeGo("env", "-json", "GOMOD", "GOPATH")
+ if state.goEnvError != nil {
+ return
+ }
+
+ state.goEnv = make(map[string]string)
+ decoder := json.NewDecoder(b)
+ if state.goEnvError = decoder.Decode(&state.goEnv); state.goEnvError != nil {
+ return
+ }
+ })
+ return state.goEnv, state.goEnvError
}
-func determineEnv(cfg *Config) goEnv {
- buf, err := invokeGo(cfg, "env", "GOMOD")
+// mustGetEnv is a convenience function that can be used if getEnv has already succeeded.
+func (state *golistState) mustGetEnv() map[string]string {
+ env, err := state.getEnv()
if err != nil {
- return goEnv{}
+ panic(fmt.Sprintf("mustGetEnv: %v", err))
}
- gomod := bytes.TrimSpace(buf.Bytes())
-
- env := goEnv{}
- env.modulesOn = len(gomod) > 0
return env
}
@@ -99,47 +125,38 @@ func determineEnv(cfg *Config) goEnv {
// the build system package structure.
// See driver for more details.
func goListDriver(cfg *Config, patterns ...string) (*driverResponse, error) {
- var sizes types.Sizes
+ // Make sure that any asynchronous go commands are killed when we return.
+ parentCtx := cfg.Context
+ if parentCtx == nil {
+ parentCtx = context.Background()
+ }
+ ctx, cancel := context.WithCancel(parentCtx)
+ defer cancel()
+
+ response := newDeduper()
+
+ // Fill in response.Sizes asynchronously if necessary.
var sizeserr error
var sizeswg sync.WaitGroup
if cfg.Mode&NeedTypesSizes != 0 || cfg.Mode&NeedTypes != 0 {
sizeswg.Add(1)
go func() {
- sizes, sizeserr = getSizes(cfg)
+ var sizes types.Sizes
+ sizes, sizeserr = packagesdriver.GetSizesGolist(ctx, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
+ // types.SizesFor always returns nil or a *types.StdSizes.
+ response.dr.Sizes, _ = sizes.(*types.StdSizes)
sizeswg.Done()
}()
}
- defer sizeswg.Wait()
-
- // start fetching rootDirs
- var info goInfo
- var rootDirsReady, envReady = make(chan struct{}), make(chan struct{})
- go func() {
- info.rootDirs = determineRootDirs(cfg)
- close(rootDirsReady)
- }()
- go func() {
- info.env = determineEnv(cfg)
- close(envReady)
- }()
- getGoInfo := func() *goInfo {
- <-rootDirsReady
- <-envReady
- return &info
- }
-
- // Ensure that we don't leak goroutines: Load is synchronous, so callers will
- // not expect it to access the fields of cfg after the call returns.
- defer getGoInfo()
- // always pass getGoInfo to golistDriver
- golistDriver := func(cfg *Config, patterns ...string) (*driverResponse, error) {
- return golistDriver(cfg, getGoInfo, patterns...)
+ state := &golistState{
+ cfg: cfg,
+ ctx: ctx,
+ vendorDirs: map[string]bool{},
}
// Determine files requested in contains patterns
var containFiles []string
- var packagesNamed []string
restPatterns := make([]string, 0, len(patterns))
// Extract file= and other [querytype]= patterns. Report an error if querytype
// doesn't exist.
@@ -155,8 +172,6 @@ extractQueries:
containFiles = append(containFiles, value)
case "pattern":
restPatterns = append(restPatterns, value)
- case "iamashamedtousethedisabledqueryname":
- packagesNamed = append(packagesNamed, value)
case "": // not a reserved query
restPatterns = append(restPatterns, pattern)
default:
@@ -172,52 +187,34 @@ extractQueries:
}
}
- response := &responseDeduper{}
- var err error
-
// See if we have any patterns to pass through to go list. Zero initial
// patterns also requires a go list call, since it's the equivalent of
// ".".
if len(restPatterns) > 0 || len(patterns) == 0 {
- dr, err := golistDriver(cfg, restPatterns...)
+ dr, err := state.createDriverResponse(restPatterns...)
if err != nil {
return nil, err
}
- response.init(dr)
- } else {
- response.init(&driverResponse{})
- }
-
- sizeswg.Wait()
- if sizeserr != nil {
- return nil, sizeserr
+ response.addAll(dr)
}
- // types.SizesFor always returns nil or a *types.StdSizes
- response.dr.Sizes, _ = sizes.(*types.StdSizes)
-
- var containsCandidates []string
if len(containFiles) != 0 {
- if err := runContainsQueries(cfg, golistDriver, response, containFiles, getGoInfo); err != nil {
+ if err := state.runContainsQueries(response, containFiles); err != nil {
return nil, err
}
}
- if len(packagesNamed) != 0 {
- if err := runNamedQueries(cfg, golistDriver, response, packagesNamed); err != nil {
- return nil, err
- }
- }
-
- modifiedPkgs, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
+ modifiedPkgs, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return nil, err
}
+
+ var containsCandidates []string
if len(containFiles) > 0 {
containsCandidates = append(containsCandidates, modifiedPkgs...)
containsCandidates = append(containsCandidates, needPkgs...)
}
- if err := addNeededOverlayPackages(cfg, golistDriver, response, needPkgs, getGoInfo); err != nil {
+ if err := state.addNeededOverlayPackages(response, needPkgs); err != nil {
return nil, err
}
// Check candidate packages for containFiles.
@@ -246,28 +243,32 @@ extractQueries:
}
}
+ sizeswg.Wait()
+ if sizeserr != nil {
+ return nil, sizeserr
+ }
return response.dr, nil
}
-func addNeededOverlayPackages(cfg *Config, driver driver, response *responseDeduper, pkgs []string, getGoInfo func() *goInfo) error {
+func (state *golistState) addNeededOverlayPackages(response *responseDeduper, pkgs []string) error {
if len(pkgs) == 0 {
return nil
}
- dr, err := driver(cfg, pkgs...)
+ dr, err := state.createDriverResponse(pkgs...)
if err != nil {
return err
}
for _, pkg := range dr.Packages {
response.addPackage(pkg)
}
- _, needPkgs, err := processGolistOverlay(cfg, response, getGoInfo)
+ _, needPkgs, err := state.processGolistOverlay(response)
if err != nil {
return err
}
- return addNeededOverlayPackages(cfg, driver, response, needPkgs, getGoInfo)
+ return state.addNeededOverlayPackages(response, needPkgs)
}
-func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, queries []string, goInfo func() *goInfo) error {
+func (state *golistState) runContainsQueries(response *responseDeduper, queries []string) error {
for _, query := range queries {
// TODO(matloob): Do only one query per directory.
fdir := filepath.Dir(query)
@@ -277,44 +278,17 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
if err != nil {
return fmt.Errorf("could not determine absolute path of file= query path %q: %v", query, err)
}
- dirResponse, err := driver(cfg, pattern)
- if err != nil || (len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].Errors) == 1) {
- // There was an error loading the package. Try to load the file as an ad-hoc package.
- // Usually the error will appear in a returned package, but may not if we're in modules mode
- // and the ad-hoc is located outside a module.
+ dirResponse, err := state.createDriverResponse(pattern)
+
+ // If there was an error loading the package, or the package is returned
+ // with errors, try to load the file as an ad-hoc package.
+ // Usually the error will appear in a returned package, but may not if we're
+ // in module mode and the ad-hoc is located outside a module.
+ if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
+ len(dirResponse.Packages[0].Errors) == 1 {
var queryErr error
- dirResponse, queryErr = driver(cfg, query)
- if queryErr != nil {
- // Return the original error if the attempt to fall back failed.
- return err
- }
- // If we get nothing back from `go list`, try to make this file into its own ad-hoc package.
- if len(dirResponse.Packages) == 0 && queryErr == nil {
- dirResponse.Packages = append(dirResponse.Packages, &Package{
- ID: "command-line-arguments",
- PkgPath: query,
- GoFiles: []string{query},
- CompiledGoFiles: []string{query},
- Imports: make(map[string]*Package),
- })
- dirResponse.Roots = append(dirResponse.Roots, "command-line-arguments")
- }
- // Special case to handle issue #33482:
- // If this is a file= query for ad-hoc packages where the file only exists on an overlay,
- // and exists outside of a module, add the file in for the package.
- if len(dirResponse.Packages) == 1 && (dirResponse.Packages[0].ID == "command-line-arguments" ||
- filepath.ToSlash(dirResponse.Packages[0].PkgPath) == filepath.ToSlash(query)) {
- if len(dirResponse.Packages[0].GoFiles) == 0 {
- filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
- // TODO(matloob): check if the file is outside of a root dir?
- for path := range cfg.Overlay {
- if path == filename {
- dirResponse.Packages[0].Errors = nil
- dirResponse.Packages[0].GoFiles = []string{path}
- dirResponse.Packages[0].CompiledGoFiles = []string{path}
- }
- }
- }
+ if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
+ return err // return the original error
}
}
isRoot := make(map[string]bool, len(dirResponse.Roots))
@@ -342,276 +316,47 @@ func runContainsQueries(cfg *Config, driver driver, response *responseDeduper, q
return nil
}
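// For reference, "file=" queries like the ones this function services enter
// through the public go/packages API; a minimal sketch, with a hypothetical
// file path, looks like this:
//
//	package main
//
//	import (
//		"fmt"
//		"log"
//
//		"golang.org/x/tools/go/packages"
//	)
//
//	func main() {
//		cfg := &packages.Config{Mode: packages.NeedName | packages.NeedFiles}
//		// Ask the driver for whichever package contains this file.
//		pkgs, err := packages.Load(cfg, "file=/home/user/src/demo/main.go")
//		if err != nil {
//			log.Fatal(err)
//		}
//		for _, p := range pkgs {
//			fmt.Println(p.ID, p.GoFiles)
//		}
//	}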
-// modCacheRegexp splits a path in a module cache into module, module version, and package.
-var modCacheRegexp = regexp.MustCompile(`(.*)@([^/\\]*)(.*)`)
-
-func runNamedQueries(cfg *Config, driver driver, response *responseDeduper, queries []string) error {
- // calling `go env` isn't free; bail out if there's nothing to do.
- if len(queries) == 0 {
- return nil
- }
- // Determine which directories are relevant to scan.
- roots, modRoot, err := roots(cfg)
- if err != nil {
- return err
- }
-
- // Scan the selected directories. Simple matches, from GOPATH/GOROOT
- // or the local module, can simply be "go list"ed. Matches from the
- // module cache need special treatment.
- var matchesMu sync.Mutex
- var simpleMatches, modCacheMatches []string
- add := func(root gopathwalk.Root, dir string) {
- // Walk calls this concurrently; protect the result slices.
- matchesMu.Lock()
- defer matchesMu.Unlock()
-
- path := dir
- if dir != root.Path {
- path = dir[len(root.Path)+1:]
- }
- if pathMatchesQueries(path, queries) {
- switch root.Type {
- case gopathwalk.RootModuleCache:
- modCacheMatches = append(modCacheMatches, path)
- case gopathwalk.RootCurrentModule:
- // We'd need to read go.mod to find the full
- // import path. Relative's easier.
- rel, err := filepath.Rel(cfg.Dir, dir)
- if err != nil {
- // This ought to be impossible, since
- // we found dir in the current module.
- panic(err)
- }
- simpleMatches = append(simpleMatches, "./"+rel)
- case gopathwalk.RootGOPATH, gopathwalk.RootGOROOT:
- simpleMatches = append(simpleMatches, path)
- }
- }
- }
-
- startWalk := time.Now()
- gopathwalk.Walk(roots, add, gopathwalk.Options{ModulesEnabled: modRoot != "", Debug: debug})
- cfg.Logf("%v for walk", time.Since(startWalk))
-
- // Weird special case: the top-level package in a module will be in
- // whatever directory the user checked the repository out into. It's
- // more reasonable for that to not match the package name. So, if there
- // are any Go files in the mod root, query it just to be safe.
- if modRoot != "" {
- rel, err := filepath.Rel(cfg.Dir, modRoot)
- if err != nil {
- panic(err) // See above.
- }
-
- files, err := ioutil.ReadDir(modRoot)
- if err != nil {
- panic(err) // See above.
- }
-
- for _, f := range files {
- if strings.HasSuffix(f.Name(), ".go") {
- simpleMatches = append(simpleMatches, rel)
- break
- }
- }
- }
-
- addResponse := func(r *driverResponse) {
- for _, pkg := range r.Packages {
- response.addPackage(pkg)
- for _, name := range queries {
- if pkg.Name == name {
- response.addRoot(pkg.ID)
- break
- }
- }
- }
- }
-
- if len(simpleMatches) != 0 {
- resp, err := driver(cfg, simpleMatches...)
- if err != nil {
- return err
- }
- addResponse(resp)
- }
-
- // Module cache matches are tricky. We want to avoid downloading new
- // versions of things, so we need to use the ones present in the cache.
- // go list doesn't accept version specifiers, so we have to write out a
- // temporary module, and do the list in that module.
- if len(modCacheMatches) != 0 {
- // Collect all the matches, deduplicating by major version
- // and preferring the newest.
- type modInfo struct {
- mod string
- major string
- }
- mods := make(map[modInfo]string)
- var imports []string
- for _, modPath := range modCacheMatches {
- matches := modCacheRegexp.FindStringSubmatch(modPath)
- mod, ver := filepath.ToSlash(matches[1]), matches[2]
- importPath := filepath.ToSlash(filepath.Join(matches[1], matches[3]))
-
- major := semver.Major(ver)
- if prevVer, ok := mods[modInfo{mod, major}]; !ok || semver.Compare(ver, prevVer) > 0 {
- mods[modInfo{mod, major}] = ver
- }
-
- imports = append(imports, importPath)
- }
-
- // Build the temporary module.
- var gomod bytes.Buffer
- gomod.WriteString("module modquery\nrequire (\n")
- for mod, version := range mods {
- gomod.WriteString("\t" + mod.mod + " " + version + "\n")
- }
- gomod.WriteString(")\n")
-
- tmpCfg := *cfg
-
- // We're only trying to look at stuff in the module cache, so
- // disable the network. This should speed things up, and has
- // prevented errors in at least one case, #28518.
- tmpCfg.Env = append([]string{"GOPROXY=off"}, cfg.Env...)
-
- var err error
- tmpCfg.Dir, err = ioutil.TempDir("", "gopackages-modquery")
- if err != nil {
- return err
- }
- defer os.RemoveAll(tmpCfg.Dir)
-
- if err := ioutil.WriteFile(filepath.Join(tmpCfg.Dir, "go.mod"), gomod.Bytes(), 0777); err != nil {
- return fmt.Errorf("writing go.mod for module cache query: %v", err)
- }
-
- // Run the query, using the import paths calculated from the matches above.
- resp, err := driver(&tmpCfg, imports...)
- if err != nil {
- return fmt.Errorf("querying module cache matches: %v", err)
- }
- addResponse(resp)
- }
-
- return nil
-}
-
-func getSizes(cfg *Config) (types.Sizes, error) {
- return packagesdriver.GetSizesGolist(cfg.Context, cfg.BuildFlags, cfg.Env, cfg.Dir, usesExportData(cfg))
-}
-
-// roots selects the appropriate paths to walk based on the passed-in configuration,
-// particularly the environment and the presence of a go.mod in cfg.Dir's parents.
-func roots(cfg *Config) ([]gopathwalk.Root, string, error) {
- stdout, err := invokeGo(cfg, "env", "GOROOT", "GOPATH", "GOMOD")
+// adhocPackage attempts to load or construct an ad-hoc package for a given
+// query, if the original call to the driver produced inadequate results.
+func (state *golistState) adhocPackage(pattern, query string) (*driverResponse, error) {
+ response, err := state.createDriverResponse(query)
if err != nil {
- return nil, "", err
- }
-
- fields := strings.Split(stdout.String(), "\n")
- if len(fields) != 4 || len(fields[3]) != 0 {
- return nil, "", fmt.Errorf("go env returned unexpected output: %q", stdout.String())
- }
- goroot, gopath, gomod := fields[0], filepath.SplitList(fields[1]), fields[2]
- var modDir string
- if gomod != "" {
- modDir = filepath.Dir(gomod)
+ return nil, err
}
-
- var roots []gopathwalk.Root
- // Always add GOROOT.
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(goroot, "/src"),
- Type: gopathwalk.RootGOROOT,
- })
- // If modules are enabled, scan the module dir.
- if modDir != "" {
- roots = append(roots, gopathwalk.Root{
- Path: modDir,
- Type: gopathwalk.RootCurrentModule,
+ // If we get nothing back from `go list`,
+ // try to make this file into its own ad-hoc package.
+ // TODO(rstambler): Should this check against the original response?
+ if len(response.Packages) == 0 {
+ response.Packages = append(response.Packages, &Package{
+ ID: "command-line-arguments",
+ PkgPath: query,
+ GoFiles: []string{query},
+ CompiledGoFiles: []string{query},
+ Imports: make(map[string]*Package),
})
- }
- // Add either GOPATH/src or GOPATH/pkg/mod, depending on module mode.
- for _, p := range gopath {
- if modDir != "" {
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(p, "/pkg/mod"),
- Type: gopathwalk.RootModuleCache,
- })
- } else {
- roots = append(roots, gopathwalk.Root{
- Path: filepath.Join(p, "/src"),
- Type: gopathwalk.RootGOPATH,
- })
- }
- }
-
- return roots, modDir, nil
-}
-
-// These functions were copied from goimports. See further documentation there.
-
-// pathMatchesQueries is adapted from pkgIsCandidate.
-// TODO: is it reasonable to do Contains here, rather than an exact match on a path component?
-func pathMatchesQueries(path string, queries []string) bool {
- lastTwo := lastTwoComponents(path)
- for _, query := range queries {
- if strings.Contains(lastTwo, query) {
- return true
- }
- if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(query) {
- lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
- if strings.Contains(lastTwo, query) {
- return true
- }
- }
- }
- return false
-}
-
-// lastTwoComponents returns at most the last two path components
-// of v, using either / or \ as the path separator.
-func lastTwoComponents(v string) string {
- nslash := 0
- for i := len(v) - 1; i >= 0; i-- {
- if v[i] == '/' || v[i] == '\\' {
- nslash++
- if nslash == 2 {
- return v[i:]
+ response.Roots = append(response.Roots, "command-line-arguments")
+ }
+ // Handle special cases.
+ if len(response.Packages) == 1 {
+ // golang/go#33482: If this is a file= query for ad-hoc packages where
+ // the file only exists on an overlay, and exists outside of a module,
+ // add the file to the package and remove the errors.
+ if response.Packages[0].ID == "command-line-arguments" ||
+ filepath.ToSlash(response.Packages[0].PkgPath) == filepath.ToSlash(query) {
+ if len(response.Packages[0].GoFiles) == 0 {
+ filename := filepath.Join(pattern, filepath.Base(query)) // avoid recomputing abspath
+ // TODO(matloob): check if the file is outside of a root dir?
+ for path := range state.cfg.Overlay {
+ if path == filename {
+ response.Packages[0].Errors = nil
+ response.Packages[0].GoFiles = []string{path}
+ response.Packages[0].CompiledGoFiles = []string{path}
+ }
+ }
}
}
}
- return v
-}
-
-func hasHyphenOrUpperASCII(s string) bool {
- for i := 0; i < len(s); i++ {
- b := s[i]
- if b == '-' || ('A' <= b && b <= 'Z') {
- return true
- }
- }
- return false
-}
-
-func lowerASCIIAndRemoveHyphen(s string) (ret string) {
- buf := make([]byte, 0, len(s))
- for i := 0; i < len(s); i++ {
- b := s[i]
- switch {
- case b == '-':
- continue
- case 'A' <= b && b <= 'Z':
- buf = append(buf, b+('a'-'A'))
- default:
- buf = append(buf, b)
- }
- }
- return string(buf)
+ return response, nil
}
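// For reference, a hedged sketch of the case adhocPackage exists for: a
// "file=" query for a file that lives only in Config.Overlay and outside any
// module. The path and contents are hypothetical; packages is
// golang.org/x/tools/go/packages.
//
//	func loadScratchFile() ([]*packages.Package, error) {
//		cfg := &packages.Config{
//			Mode: packages.NeedName | packages.NeedFiles,
//			Overlay: map[string][]byte{
//				"/tmp/scratch/main.go": []byte("package main\n\nfunc main() {}\n"),
//			},
//		}
//		// The resulting package is typically the ad-hoc
//		// "command-line-arguments" package described above.
//		return packages.Load(cfg, "file=/tmp/scratch/main.go")
//	}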
// Fields must match go list;
@@ -636,6 +381,7 @@ type jsonPackage struct {
Imports []string
ImportMap map[string]string
Deps []string
+ Module *packagesinternal.Module
TestGoFiles []string
TestImports []string
XTestGoFiles []string
@@ -656,10 +402,9 @@ func otherFiles(p *jsonPackage) [][]string {
return [][]string{p.CFiles, p.CXXFiles, p.MFiles, p.HFiles, p.FFiles, p.SFiles, p.SwigFiles, p.SwigCXXFiles, p.SysoFiles}
}
-// golistDriver uses the "go list" command to expand the pattern
-// words and return metadata for the specified packages. dir may be
-// "" and env may be nil, as per os/exec.Command.
-func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driverResponse, error) {
+// createDriverResponse uses the "go list" command to expand the pattern
+// words and return a response for the specified packages.
+func (state *golistState) createDriverResponse(words ...string) (*driverResponse, error) {
// go list uses the following identifiers in ImportPath and Imports:
//
// "p" -- importable package or main (command)
@@ -673,11 +418,13 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// Run "go list" for complete
// information on the specified packages.
- buf, err := invokeGo(cfg, golistargs(cfg, words)...)
+ buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
if err != nil {
return nil, err
}
seen := make(map[string]*jsonPackage)
+ pkgs := make(map[string]*Package)
+ additionalErrors := make(map[string][]Error)
// Decode the JSON and convert it to Package form.
var response driverResponse
for dec := json.NewDecoder(buf); dec.More(); {
@@ -708,18 +455,72 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
// contained in a known module or GOPATH entry. This will allow the package to be
// properly "reclaimed" when overlays are processed.
if filepath.IsAbs(p.ImportPath) && p.Error != nil {
- pkgPath, ok := getPkgPath(cfg, p.ImportPath, rootsDirs)
+ pkgPath, ok, err := state.getPkgPath(p.ImportPath)
+ if err != nil {
+ return nil, err
+ }
if ok {
p.ImportPath = pkgPath
}
}
if old, found := seen[p.ImportPath]; found {
- if !reflect.DeepEqual(p, old) {
- return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath)
+ // If one version of the package has an error, and the other doesn't, assume
+ // that this is a case where go list is reporting a fake dependency variant
+ // of the imported package: When a package tries to invalidly import another
+ // package, go list emits a variant of the imported package (with the same
+ // import path, but with an error on it, and the package will have a
+ // DepError set on it). An example of when this can happen is for imports of
+ // main packages: main packages can not be imported, but they may be
+ // separately matched and listed by another pattern.
+ // See golang.org/issue/36188 for more details.
+
+ // The plan is that eventually, hopefully in Go 1.15, the error will be
+ // reported on the importing package rather than the duplicate "fake"
+ // version of the imported package. Once all supported versions of Go
+ // have the new behavior this logic can be deleted.
+ // TODO(matloob): delete the workaround logic once all supported versions of
+ // Go return the errors on the proper package.
+
+ // There should be exactly one version of a package that doesn't have an
+ // error.
+ if old.Error == nil && p.Error == nil {
+ if !reflect.DeepEqual(p, old) {
+ return nil, fmt.Errorf("internal error: go list gives conflicting information for package %v", p.ImportPath)
+ }
+ continue
}
- // skip the duplicate
- continue
+
+ // Determine if this package's error needs to be bubbled up.
+ // This is a hack, and we expect for go list to eventually set the error
+ // on the package.
+ if old.Error != nil {
+ var errkind string
+ if strings.Contains(old.Error.Err, "not an importable package") {
+ errkind = "not an importable package"
+ } else if strings.Contains(old.Error.Err, "use of internal package") && strings.Contains(old.Error.Err, "not allowed") {
+ errkind = "use of internal package not allowed"
+ }
+ if errkind != "" {
+ if len(old.Error.ImportStack) < 2 {
+ return nil, fmt.Errorf(`internal error: go list gave a %q error with an import stack with fewer than two elements`, errkind)
+ }
+ importingPkg := old.Error.ImportStack[len(old.Error.ImportStack)-2]
+ additionalErrors[importingPkg] = append(additionalErrors[importingPkg], Error{
+ Pos: old.Error.Pos,
+ Msg: old.Error.Err,
+ Kind: ListError,
+ })
+ }
+ }
+
+ // Make sure that if there's a version of the package without an error,
+ // that's the one reported to the user.
+ if old.Error == nil {
+ continue
+ }
+
+ // This package will replace the old one at the end of the loop.
}
seen[p.ImportPath] = p
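// For reference, an abbreviated, hypothetical pair of go list -e records of
// the kind the deduplication above reconciles: a main package that is also
// invalidly imported elsewhere can be emitted twice under one ImportPath,
// once clean and once carrying the error.
//
//	{"ImportPath": "example.com/cmd", "Name": "main", "GoFiles": ["main.go"]}
//	{"ImportPath": "example.com/cmd",
//	 "Error": {"ImportStack": ["example.com/app", "example.com/cmd"],
//	           "Err": "import \"example.com/cmd\" is a program, not an importable package"}}
//
// The error-free record is kept, and because the error text contains "not an
// importable package", the error is re-attached to "example.com/app", the
// second-to-last entry on the ImportStack.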
@@ -729,6 +530,8 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
+ forTest: p.ForTest,
+ module: p.Module,
}
// Work around https://golang.org/issue/28749:
@@ -811,34 +614,43 @@ func golistDriver(cfg *Config, rootsDirs func() *goInfo, words ...string) (*driv
msg += fmt.Sprintf(": import stack: %v", p.Error.ImportStack)
}
pkg.Errors = append(pkg.Errors, Error{
- Pos: p.Error.Pos,
- Msg: msg,
+ Pos: p.Error.Pos,
+ Msg: msg,
+ Kind: ListError,
})
}
+ pkgs[pkg.ID] = pkg
+ }
+
+ for id, errs := range additionalErrors {
+ if p, ok := pkgs[id]; ok {
+ p.Errors = append(p.Errors, errs...)
+ }
+ }
+ for _, pkg := range pkgs {
response.Packages = append(response.Packages, pkg)
}
+ sort.Slice(response.Packages, func(i, j int) bool { return response.Packages[i].ID < response.Packages[j].ID })
return &response, nil
}
// getPkgPath finds the package path of a directory if it's relative to a root directory.
-func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) {
+func (state *golistState) getPkgPath(dir string) (string, bool, error) {
absDir, err := filepath.Abs(dir)
if err != nil {
- cfg.Logf("error getting absolute path of %s: %v", dir, err)
- return "", false
+ return "", false, err
}
- for rdir, rpath := range goInfo().rootDirs {
- absRdir, err := filepath.Abs(rdir)
- if err != nil {
- cfg.Logf("error getting absolute path of %s: %v", rdir, err)
- continue
- }
+ roots, err := state.determineRootDirs()
+ if err != nil {
+ return "", false, err
+ }
+
+ for rdir, rpath := range roots {
// Make sure that the directory is in the module,
// to avoid creating a path relative to another module.
- if !strings.HasPrefix(absDir, absRdir) {
- cfg.Logf("%s does not have prefix %s", absDir, absRdir)
+ if !strings.HasPrefix(absDir, rdir) {
continue
}
// TODO(matloob): This doesn't properly handle symlinks.
@@ -853,11 +665,11 @@ func getPkgPath(cfg *Config, dir string, goInfo func() *goInfo) (string, bool) {
// Once the file is saved, gopls, or the next invocation of the tool will get the correct
// result straight from golist.
// TODO(matloob): Implement module tiebreaking?
- return path.Join(rpath, filepath.ToSlash(r)), true
+ return path.Join(rpath, filepath.ToSlash(r)), true, nil
}
- return filepath.ToSlash(r), true
+ return filepath.ToSlash(r), true, nil
}
- return "", false
+ return "", false, nil
}
// absJoin absolutizes and flattens the lists of files.
@@ -876,8 +688,8 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
func golistargs(cfg *Config, words []string) []string {
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
fullargs := []string{
- "list", "-e", "-json",
- fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypesInfo|NeedTypesSizes) != 0),
+ "-e", "-json",
+ fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
fmt.Sprintf("-test=%t", cfg.Tests),
fmt.Sprintf("-export=%t", usesExportData(cfg)),
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
@@ -892,25 +704,20 @@ func golistargs(cfg *Config, words []string) []string {
}
// invokeGo returns the stdout of a go command invocation.
-func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
- stdout := new(bytes.Buffer)
- stderr := new(bytes.Buffer)
- cmd := exec.CommandContext(cfg.Context, "go", args...)
- // On darwin the cwd gets resolved to the real path, which breaks anything that
- // expects the working directory to keep the original path, including the
- // go command when dealing with modules.
- // The Go stdlib has a special feature where if the cwd and the PWD are the
- // same node then it trusts the PWD, so by setting it in the env for the child
- // process we fix up all the paths returned by the go command.
- cmd.Env = append(append([]string{}, cfg.Env...), "PWD="+cfg.Dir)
- cmd.Dir = cfg.Dir
- cmd.Stdout = stdout
- cmd.Stderr = stderr
- defer func(start time.Time) {
- cfg.Logf("%s for %v, stderr: <<%s>> stdout: <<%s>>\n", time.Since(start), cmdDebugStr(cmd, args...), stderr, stdout)
- }(time.Now())
-
- if err := cmd.Run(); err != nil {
+func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer, error) {
+ cfg := state.cfg
+
+ inv := &gocommand.Invocation{
+ Verb: verb,
+ Args: args,
+ BuildFlags: cfg.BuildFlags,
+ Env: cfg.Env,
+ Logf: cfg.Logf,
+ WorkingDir: cfg.Dir,
+ }
+
+ stdout, stderr, _, err := inv.RunRaw(cfg.Context)
+ if err != nil {
// Check for 'go' executable not being found.
if ee, ok := err.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
return nil, fmt.Errorf("'go list' driver requires 'go', but %s", exec.ErrNotFound)
@@ -920,7 +727,7 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
if !ok {
// Catastrophic error:
// - context cancellation
- return nil, fmt.Errorf("couldn't exec 'go %v': %s %T", args, err, err)
+ return nil, fmt.Errorf("couldn't run 'go': %v", err)
}
// Old go version?
@@ -947,7 +754,12 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
!strings.ContainsRune("!\"#$%&'()*,:;<=>?[\\]^`{|}\uFFFD", r)
}
if len(stderr.String()) > 0 && strings.HasPrefix(stderr.String(), "# ") {
- if strings.HasPrefix(strings.TrimLeftFunc(stderr.String()[len("# "):], isPkgPathRune), "\n") {
+ msg := stderr.String()[len("# "):]
+ if strings.HasPrefix(strings.TrimLeftFunc(msg, isPkgPathRune), "\n") {
+ return stdout, nil
+ }
+ // Treat pkg-config errors as a special case (golang.org/issue/36770).
+ if strings.HasPrefix(msg, "pkg-config") {
return stdout, nil
}
}
@@ -1036,16 +848,6 @@ func invokeGo(cfg *Config, args ...string) (*bytes.Buffer, error) {
return nil, fmt.Errorf("go %v: %s: %s", args, exitErr, stderr)
}
}
-
- // As of writing, go list -export prints some non-fatal compilation
- // errors to stderr, even with -e set. We would prefer that it put
- // them in the Package.Error JSON (see https://golang.org/issue/26319).
- // In the meantime, there's nowhere good to put them, but they can
- // be useful for debugging. Print them if $GOPACKAGESPRINTGOLISTERRORS
- // is set.
- if len(stderr.Bytes()) != 0 && os.Getenv("GOPACKAGESPRINTGOLISTERRORS") != "" {
- fmt.Fprintf(os.Stderr, "%s stderr: <<%s>>\n", cmdDebugStr(cmd, args...), stderr)
- }
return stdout, nil
}
diff --git a/vendor/golang.org/x/tools/go/packages/golist_overlay.go b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
index a7de62299d..7974a6c9bb 100644
--- a/vendor/golang.org/x/tools/go/packages/golist_overlay.go
+++ b/vendor/golang.org/x/tools/go/packages/golist_overlay.go
@@ -1,12 +1,13 @@
package packages
import (
- "bytes"
"encoding/json"
"fmt"
"go/parser"
"go/token"
+ "os"
"path/filepath"
+ "sort"
"strconv"
"strings"
)
@@ -16,7 +17,7 @@ import (
// sometimes incorrect.
// TODO(matloob): Handle unsupported cases, including the following:
// - determining the correct package to add given a new import path
-func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func() *goInfo) (modifiedPkgs, needPkgs []string, err error) {
+func (state *golistState) processGolistOverlay(response *responseDeduper) (modifiedPkgs, needPkgs []string, err error) {
havePkgs := make(map[string]string) // importPath -> non-test package ID
needPkgsSet := make(map[string]bool)
modifiedPkgsSet := make(map[string]bool)
@@ -34,7 +35,23 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
// potentially modifying the transitive set of dependencies).
var overlayAddsImports bool
- for opath, contents := range cfg.Overlay {
+ // If both a package and its test package are created by the overlay, we
+ // need the real package first. Process all non-test files before test
+ // files, and make the whole process deterministic while we're at it.
+ var overlayFiles []string
+ for opath := range state.cfg.Overlay {
+ overlayFiles = append(overlayFiles, opath)
+ }
+ sort.Slice(overlayFiles, func(i, j int) bool {
+ iTest := strings.HasSuffix(overlayFiles[i], "_test.go")
+ jTest := strings.HasSuffix(overlayFiles[j], "_test.go")
+ if iTest != jTest {
+ return !iTest // non-tests are before tests.
+ }
+ return overlayFiles[i] < overlayFiles[j]
+ })
+ for _, opath := range overlayFiles {
+ contents := state.cfg.Overlay[opath]
base := filepath.Base(opath)
dir := filepath.Dir(opath)
var pkg *Package // if opath belongs to both a package and its test variant, this will be the test variant
@@ -64,14 +81,8 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
testVariantOf = p
continue nextPackage
}
+ // We must have already seen the package of which this is a test variant.
if pkg != nil && p != pkg && pkg.PkgPath == p.PkgPath {
- // If we've already seen the test variant,
- // make sure to label which package it is a test variant of.
- if hasTestFiles(pkg) {
- testVariantOf = p
- continue nextPackage
- }
- // If we have already seen the package of which this is a test variant.
if hasTestFiles(p) {
testVariantOf = pkg
}
@@ -86,7 +97,10 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if pkg == nil {
// Try to find the module or gopath dir the file is contained in.
// Then for modules, add the module opath to the beginning.
- pkgPath, ok := getPkgPath(cfg, dir, rootDirs)
+ pkgPath, ok, err := state.getPkgPath(dir)
+ if err != nil {
+ return nil, nil, err
+ }
if !ok {
break
}
@@ -114,6 +128,11 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
if isTestFile && !isXTest && testVariantOf != nil {
pkg.GoFiles = append(pkg.GoFiles, testVariantOf.GoFiles...)
pkg.CompiledGoFiles = append(pkg.CompiledGoFiles, testVariantOf.CompiledGoFiles...)
+ // Add the package under test and its imports to the test variant.
+ pkg.forTest = testVariantOf.PkgPath
+ for k, v := range testVariantOf.Imports {
+ pkg.Imports[k] = &Package{ID: v.ID}
+ }
}
}
}
@@ -130,42 +149,45 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
continue
}
for _, imp := range imports {
- _, found := pkg.Imports[imp]
- if !found {
- overlayAddsImports = true
- // TODO(matloob): Handle cases when the following block isn't correct.
- // These include imports of vendored packages, etc.
- id, ok := havePkgs[imp]
- if !ok {
- id = imp
- }
- pkg.Imports[imp] = &Package{ID: id}
- // Add dependencies to the non-test variant version of this package as wel.
- if testVariantOf != nil {
- testVariantOf.Imports[imp] = &Package{ID: id}
+ if _, found := pkg.Imports[imp]; found {
+ continue
+ }
+ overlayAddsImports = true
+ id, ok := havePkgs[imp]
+ if !ok {
+ var err error
+ id, err = state.resolveImport(dir, imp)
+ if err != nil {
+ return nil, nil, err
}
}
+ pkg.Imports[imp] = &Package{ID: id}
+ // Add dependencies to the non-test variant version of this package as well.
+ if testVariantOf != nil {
+ testVariantOf.Imports[imp] = &Package{ID: id}
+ }
}
- continue
}
- // toPkgPath tries to guess the package path given the id.
- // This isn't always correct -- it's certainly wrong for
- // vendored packages' paths.
- toPkgPath := func(id string) string {
- // TODO(matloob): Handle vendor paths.
- i := strings.IndexByte(id, ' ')
- if i >= 0 {
- return id[:i]
+ // toPkgPath guesses the package path given the id.
+ toPkgPath := func(sourceDir, id string) (string, error) {
+ if i := strings.IndexByte(id, ' '); i >= 0 {
+ return state.resolveImport(sourceDir, id[:i])
}
- return id
+ return state.resolveImport(sourceDir, id)
}
- // Do another pass now that new packages have been created to determine the
- // set of missing packages.
+ // Now that new packages have been created, do another pass to determine
+ // the new set of missing packages.
for _, pkg := range response.dr.Packages {
for _, imp := range pkg.Imports {
- pkgPath := toPkgPath(imp.ID)
+ if len(pkg.GoFiles) == 0 {
+ return nil, nil, fmt.Errorf("cannot resolve imports for package %q with no Go files", pkg.PkgPath)
+ }
+ pkgPath, err := toPkgPath(filepath.Dir(pkg.GoFiles[0]), imp.ID)
+ if err != nil {
+ return nil, nil, err
+ }
if _, ok := havePkgs[pkgPath]; !ok {
needPkgsSet[pkgPath] = true
}
@@ -185,6 +207,52 @@ func processGolistOverlay(cfg *Config, response *responseDeduper, rootDirs func(
return modifiedPkgs, needPkgs, err
}
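// For reference, the overlay processing above is driven by the public
// packages.Config.Overlay field; a minimal sketch with a hypothetical path:
//
//	func loadWithOverlay() ([]*packages.Package, error) {
//		cfg := &packages.Config{
//			Mode: packages.NeedName | packages.NeedFiles | packages.NeedImports,
//			Overlay: map[string][]byte{
//				// An unsaved edit adding a file (and a new import) to demo.
//				"/home/user/src/demo/extra.go": []byte("package demo\n\nimport \"fmt\"\n\nfunc Hi() { fmt.Println(\"hi\") }\n"),
//			},
//		}
//		// processGolistOverlay patches the go list results so extra.go and
//		// its "fmt" import show up in the returned packages.
//		return packages.Load(cfg, "./...")
//	}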
+// resolveImport finds the ID of a package given its import path.
+// In particular, it will find the right vendored copy when in GOPATH mode.
+func (state *golistState) resolveImport(sourceDir, importPath string) (string, error) {
+ env, err := state.getEnv()
+ if err != nil {
+ return "", err
+ }
+ if env["GOMOD"] != "" {
+ return importPath, nil
+ }
+
+ searchDir := sourceDir
+ for {
+ vendorDir := filepath.Join(searchDir, "vendor")
+ exists, ok := state.vendorDirs[vendorDir]
+ if !ok {
+ info, err := os.Stat(vendorDir)
+ exists = err == nil && info.IsDir()
+ state.vendorDirs[vendorDir] = exists
+ }
+
+ if exists {
+ vendoredPath := filepath.Join(vendorDir, importPath)
+ if info, err := os.Stat(vendoredPath); err == nil && info.IsDir() {
+ // We should probably check for .go files here, but shame on anyone who fools us.
+ path, ok, err := state.getPkgPath(vendoredPath)
+ if err != nil {
+ return "", err
+ }
+ if ok {
+ return path, nil
+ }
+ }
+ }
+
+ // We know we've hit the top of the filesystem when we Dir / and get /,
+ // or C:\ and get C:\, etc.
+ next := filepath.Dir(searchDir)
+ if next == searchDir {
+ break
+ }
+ searchDir = next
+ }
+ return importPath, nil
+}
+
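// For reference, a standalone, uncached sketch of the vendor-directory walk
// above, using only os and path/filepath; names are hypothetical:
//
//	func findVendored(dir, importPath string) (string, bool) {
//		for {
//			cand := filepath.Join(dir, "vendor", filepath.FromSlash(importPath))
//			if fi, err := os.Stat(cand); err == nil && fi.IsDir() {
//				return cand, true // a vendored copy shadows the bare import path
//			}
//			parent := filepath.Dir(dir)
//			if parent == dir { // reached the filesystem root
//				return "", false
//			}
//			dir = parent
//		}
//	}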
func hasTestFiles(p *Package) bool {
for _, f := range p.GoFiles {
if strings.HasSuffix(f, "_test.go") {
@@ -194,44 +262,59 @@ func hasTestFiles(p *Package) bool {
return false
}
-// determineRootDirs returns a mapping from directories code can be contained in to the
-// corresponding import path prefixes of those directories.
-// Its result is used to try to determine the import path for a package containing
-// an overlay file.
-func determineRootDirs(cfg *Config) map[string]string {
- // Assume modules first:
- out, err := invokeGo(cfg, "list", "-m", "-json", "all")
+// determineRootDirs returns a mapping from absolute directories that could
+// contain code to their corresponding import path prefixes.
+func (state *golistState) determineRootDirs() (map[string]string, error) {
+ env, err := state.getEnv()
if err != nil {
- return determineRootDirsGOPATH(cfg)
+ return nil, err
+ }
+ if env["GOMOD"] != "" {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsModules()
+ })
+ } else {
+ state.rootsOnce.Do(func() {
+ state.rootDirs, state.rootDirsError = state.determineRootDirsGOPATH()
+ })
+ }
+ return state.rootDirs, state.rootDirsError
+}
+
+func (state *golistState) determineRootDirsModules() (map[string]string, error) {
+ out, err := state.invokeGo("list", "-m", "-json", "all")
+ if err != nil {
+ return nil, err
}
m := map[string]string{}
type jsonMod struct{ Path, Dir string }
for dec := json.NewDecoder(out); dec.More(); {
mod := new(jsonMod)
if err := dec.Decode(mod); err != nil {
- return m // Give up and return an empty map. Package won't be found for overlay.
+ return nil, err
}
if mod.Dir != "" && mod.Path != "" {
// This is a valid module; add it to the map.
- m[mod.Dir] = mod.Path
+ absDir, err := filepath.Abs(mod.Dir)
+ if err != nil {
+ return nil, err
+ }
+ m[absDir] = mod.Path
}
}
- return m
+ return m, nil
}
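// For reference, determineRootDirsModules decodes output shaped roughly like
// the following (abbreviated; module names, versions and paths hypothetical):
//
//	$ go list -m -json all
//	{
//		"Path": "example.com/demo",
//		"Main": true,
//		"Dir": "/home/user/src/demo"
//	}
//	{
//		"Path": "golang.org/x/tools",
//		"Version": "v0.0.0-20200103221440-774c71fcf114",
//		"Dir": "/home/user/go/pkg/mod/golang.org/x/tools@v0.0.0-20200103221440-774c71fcf114"
//	}
//
// Only Path and Dir are decoded here; each Dir is made absolute and mapped to
// its module path so overlay files can be attributed to the right module.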
-func determineRootDirsGOPATH(cfg *Config) map[string]string {
+func (state *golistState) determineRootDirsGOPATH() (map[string]string, error) {
m := map[string]string{}
- out, err := invokeGo(cfg, "env", "GOPATH")
- if err != nil {
- // Could not determine root dir mapping. Everything is best-effort, so just return an empty map.
- // When we try to find the import path for a directory, there will be no root-dir match and
- // we'll give up.
- return m
- }
- for _, p := range filepath.SplitList(string(bytes.TrimSpace(out.Bytes()))) {
- m[filepath.Join(p, "src")] = ""
+ for _, dir := range filepath.SplitList(state.mustGetEnv()["GOPATH"]) {
+ absDir, err := filepath.Abs(dir)
+ if err != nil {
+ return nil, err
+ }
+ m[filepath.Join(absDir, "src")] = ""
}
- return m
+ return m, nil
}
func extractImports(filename string, contents []byte) ([]string, error) {
diff --git a/vendor/golang.org/x/tools/go/packages/packages.go b/vendor/golang.org/x/tools/go/packages/packages.go
index 050cca43a2..1ac6558c1d 100644
--- a/vendor/golang.org/x/tools/go/packages/packages.go
+++ b/vendor/golang.org/x/tools/go/packages/packages.go
@@ -23,6 +23,7 @@ import (
"sync"
"golang.org/x/tools/go/gcexportdata"
+ "golang.org/x/tools/internal/packagesinternal"
)
// A LoadMode controls the amount of detail to return when loading.
@@ -34,6 +35,9 @@ import (
// Load may return more information than requested.
type LoadMode int
+// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
+// NeedExportFile to make it consistent with the Package field it's adding.
+
const (
// NeedName adds Name and PkgPath.
NeedName LoadMode = 1 << iota
@@ -51,7 +55,7 @@ const (
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
NeedDeps
- // NeedExportsFile adds ExportsFile.
+ // NeedExportsFile adds ExportFile.
NeedExportsFile
// NeedTypes adds Types, Fset, and IllTyped.
@@ -160,7 +164,7 @@ type Config struct {
Tests bool
// Overlay provides a mapping of absolute file paths to file contents.
- // If the file with the given path already exists, the parser will use the
+ // If the file with the given path already exists, the parser will use the
// alternative file contents provided by the map.
//
// Overlays provide incomplete support for when a given file doesn't
@@ -292,6 +296,21 @@ type Package struct {
// TypesSizes provides the effective size function for types in TypesInfo.
TypesSizes types.Sizes
+
+ // forTest is the package under test, if any.
+ forTest string
+
+ // module is the module information for the package if it exists.
+ module *packagesinternal.Module
+}
+
+func init() {
+ packagesinternal.GetForTest = func(p interface{}) string {
+ return p.(*Package).forTest
+ }
+ packagesinternal.GetModule = func(p interface{}) *packagesinternal.Module {
+ return p.(*Package).module
+ }
}
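// For reference, the init function above uses a common "internal hook" pattern
// to expose unexported fields without widening the public API. A hypothetical,
// self-contained sketch of the same pattern:
//
//	// File internal/hooks/hooks.go:
//	package hooks
//
//	// GetSecret is replaced by the owning package at init time.
//	var GetSecret = func(v interface{}) string { return "" }
//
//	// File widget/widget.go:
//	package widget
//
//	import "example.com/internal/hooks"
//
//	type Widget struct{ secret string }
//
//	func init() {
//		hooks.GetSecret = func(v interface{}) string { return v.(*Widget).secret }
//	}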
// An Error describes a problem with a package's metadata, syntax, or types.
@@ -500,12 +519,23 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
if i, found := rootMap[pkg.ID]; found {
rootIndex = i
}
+
+ // Overlays can invalidate export data.
+ // TODO(matloob): make this check fine-grained based on dependencies on overlaid files
+ exportDataInvalid := len(ld.Overlay) > 0 || pkg.ExportFile == "" && pkg.PkgPath != "unsafe"
+ // This package needs type information if the caller requested types and the package is
+ // either a root, or it's a non-root and the user requested dependencies ...
+ needtypes := (ld.Mode&NeedTypes|NeedTypesInfo != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0))
+ // This package needs source if the caller requested source (or types info, which implies source)
+ // and the package is either a root, or it's a non-root and the user requested dependencies ...
+ needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
+ // ... or if we need types and the exportData is invalid. We fall back to (incompletely)
+ // typechecking packages from source if they fail to compile.
+ (ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
lpkg := &loaderPackage{
Package: pkg,
- needtypes: (ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0,
- needsrc: (ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && ld.Mode&NeedDeps != 0 && rootIndex < 0) || rootIndex >= 0 ||
- len(ld.Overlay) > 0 || // Overlays can invalidate export data. TODO(matloob): make this check fine-grained based on dependencies on overlaid files
- pkg.ExportFile == "" && pkg.PkgPath != "unsafe",
+ needtypes: needtypes,
+ needsrc: needsrc,
}
ld.pkgs[lpkg.ID] = lpkg
if rootIndex >= 0 {
@@ -713,7 +743,7 @@ func (ld *loader) loadPackage(lpkg *loaderPackage) {
// which would then require that such created packages be explicitly
// inserted back into the Import graph as a final step after export data loading.
// The Diamond test exercises this case.
- if !lpkg.needtypes {
+ if !lpkg.needtypes && !lpkg.needsrc {
return
}
if !lpkg.needsrc {
diff --git a/vendor/golang.org/x/tools/imports/forward.go b/vendor/golang.org/x/tools/imports/forward.go
index eef25969de..b4f4287679 100644
--- a/vendor/golang.org/x/tools/imports/forward.go
+++ b/vendor/golang.org/x/tools/imports/forward.go
@@ -4,6 +4,7 @@ package imports // import "golang.org/x/tools/imports"
import (
"go/build"
+ "os"
intimp "golang.org/x/tools/internal/imports"
)
@@ -42,6 +43,10 @@ func Process(filename string, src []byte, opt *Options) ([]byte, error) {
Env: &intimp.ProcessEnv{
GOPATH: build.Default.GOPATH,
GOROOT: build.Default.GOROOT,
+ GOFLAGS: os.Getenv("GOFLAGS"),
+ GO111MODULE: os.Getenv("GO111MODULE"),
+ GOPROXY: os.Getenv("GOPROXY"),
+ GOSUMDB: os.Getenv("GOSUMDB"),
Debug: Debug,
LocalPrefix: LocalPrefix,
},
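// For reference, these environment values flow in through the public
// imports.Process entry point; a minimal sketch (the filename is only used as
// a hint and may be hypothetical):
//
//	func fixImports(src []byte) ([]byte, error) {
//		// GOFLAGS, GO111MODULE, GOPROXY and GOSUMDB are now taken from the
//		// caller's environment, per the change above.
//		return imports.Process("demo.go", src, &imports.Options{Comments: true, TabWidth: 8})
//	}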
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
index 7219c8e9ff..9887f7e7a0 100644
--- a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk.go
@@ -14,14 +14,14 @@ import (
"sync"
)
-// TraverseLink is used as a return value from WalkFuncs to indicate that the
+// ErrTraverseLink is used as a return value from WalkFuncs to indicate that the
// symlink named in the call may be traversed.
-var TraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
+var ErrTraverseLink = errors.New("fastwalk: traverse symlink, assuming target is a directory")
-// SkipFiles is a used as a return value from WalkFuncs to indicate that the
+// ErrSkipFiles is used as a return value from WalkFuncs to indicate that the
// callback should not be called for any other files in the current directory.
// Child directories will still be traversed.
-var SkipFiles = errors.New("fastwalk: skip remaining files in directory")
+var ErrSkipFiles = errors.New("fastwalk: skip remaining files in directory")
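// For reference, a hedged sketch of how a WalkFunc uses these sentinels; the
// root path is hypothetical and fastwalk is internal to x/tools:
//
//	err := fastwalk.Walk("/home/user/go/src", func(path string, typ os.FileMode) error {
//		if typ.IsRegular() {
//			if strings.HasSuffix(path, ".go") {
//				// One Go file is enough; skip the rest of this directory.
//				return fastwalk.ErrSkipFiles
//			}
//			return nil
//		}
//		if typ == os.ModeSymlink {
//			// Follow the symlink as if it were a directory.
//			return fastwalk.ErrTraverseLink
//		}
//		return nil
//	})
//	if err != nil {
//		// handle the error
//	}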
// Walk is a faster implementation of filepath.Walk.
//
@@ -167,7 +167,7 @@ func (w *walker) onDirEnt(dirName, baseName string, typ os.FileMode) error {
err := w.fn(joined, typ)
if typ == os.ModeSymlink {
- if err == TraverseLink {
+ if err == ErrTraverseLink {
// Set callbackDone so we don't call it twice for both the
// symlink-as-symlink and the symlink-as-directory later:
w.enqueue(walkItem{dir: joined, callbackDone: true})
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
index a906b87595..b0d6327a9e 100644
--- a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_portable.go
@@ -26,7 +26,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue
}
if err := fn(dirName, fi.Name(), fi.Mode()&os.ModeType); err != nil {
- if err == SkipFiles {
+ if err == ErrSkipFiles {
skipFiles = true
continue
}
diff --git a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
index 3369b1a0b2..ce38fdcf83 100644
--- a/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
+++ b/vendor/golang.org/x/tools/internal/fastwalk/fastwalk_unix.go
@@ -66,7 +66,7 @@ func readDir(dirName string, fn func(dirName, entName string, typ os.FileMode) e
continue
}
if err := fn(dirName, name, typ); err != nil {
- if err == SkipFiles {
+ if err == ErrSkipFiles {
skipFiles = true
continue
}
diff --git a/vendor/golang.org/x/tools/internal/gocommand/invoke.go b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
new file mode 100644
index 0000000000..75d73e744f
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/gocommand/invoke.go
@@ -0,0 +1,121 @@
+// Package gocommand is a helper for calling the go command.
+package gocommand
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+ "time"
+)
+
+// An Invocation represents a call to the go command.
+type Invocation struct {
+ Verb string
+ Args []string
+ BuildFlags []string
+ Env []string
+ WorkingDir string
+ Logf func(format string, args ...interface{})
+}
+
+// Run runs the invocation, returning its stdout and an error suitable for
+// human consumption, including stderr.
+func (i *Invocation) Run(ctx context.Context) (*bytes.Buffer, error) {
+ stdout, _, friendly, _ := i.RunRaw(ctx)
+ return stdout, friendly
+}
+
+// RunRaw is like Run, but also returns the raw stderr and error for callers
+// that want to do low-level error handling/recovery.
+func (i *Invocation) RunRaw(ctx context.Context) (stdout *bytes.Buffer, stderr *bytes.Buffer, friendlyError error, rawError error) {
+ log := i.Logf
+ if log == nil {
+ log = func(string, ...interface{}) {}
+ }
+
+ goArgs := []string{i.Verb}
+ switch i.Verb {
+ case "mod":
+ // mod needs the sub-verb before build flags.
+ goArgs = append(goArgs, i.Args[0])
+ goArgs = append(goArgs, i.BuildFlags...)
+ goArgs = append(goArgs, i.Args[1:]...)
+ case "env":
+ // env doesn't take build flags.
+ goArgs = append(goArgs, i.Args...)
+ default:
+ goArgs = append(goArgs, i.BuildFlags...)
+ goArgs = append(goArgs, i.Args...)
+ }
+ cmd := exec.Command("go", goArgs...)
+ stdout = &bytes.Buffer{}
+ stderr = &bytes.Buffer{}
+ cmd.Stdout = stdout
+ cmd.Stderr = stderr
+ // On darwin the cwd gets resolved to the real path, which breaks anything that
+ // expects the working directory to keep the original path, including the
+ // go command when dealing with modules.
+ // The Go stdlib has a special feature where if the cwd and the PWD are the
+ // same node then it trusts the PWD, so by setting it in the env for the child
+ // process we fix up all the paths returned by the go command.
+ cmd.Env = append(append([]string{}, i.Env...), "PWD="+i.WorkingDir)
+ cmd.Dir = i.WorkingDir
+
+ defer func(start time.Time) { log("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
+
+ rawError = runCmdContext(ctx, cmd)
+ friendlyError = rawError
+ if rawError != nil {
+ // Check for 'go' executable not being found.
+ if ee, ok := rawError.(*exec.Error); ok && ee.Err == exec.ErrNotFound {
+ friendlyError = fmt.Errorf("go command required, not found: %v", ee)
+ }
+ if ctx.Err() != nil {
+ friendlyError = ctx.Err()
+ }
+ friendlyError = fmt.Errorf("err: %v: stderr: %s", rawError, stderr)
+ }
+ return
+}
+
+// runCmdContext is like exec.CommandContext except it sends os.Interrupt
+// before os.Kill.
+func runCmdContext(ctx context.Context, cmd *exec.Cmd) error {
+ if err := cmd.Start(); err != nil {
+ return err
+ }
+ resChan := make(chan error, 1)
+ go func() {
+ resChan <- cmd.Wait()
+ }()
+
+ select {
+ case err := <-resChan:
+ return err
+ case <-ctx.Done():
+ }
+ // Cancelled. Interrupt and see if it ends voluntarily.
+ cmd.Process.Signal(os.Interrupt)
+ select {
+ case err := <-resChan:
+ return err
+ case <-time.After(time.Second):
+ }
+ // Didn't shut down in response to interrupt. Kill it hard.
+ cmd.Process.Kill()
+ return <-resChan
+}
+
+func cmdDebugStr(cmd *exec.Cmd) string {
+ env := make(map[string]string)
+ for _, kv := range cmd.Env {
+ split := strings.Split(kv, "=")
+ k, v := split[0], split[1]
+ env[k] = v
+ }
+
+ return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], cmd.Args)
+}
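// For reference, a minimal use of the new Invocation type (the package is
// internal to x/tools, so only code inside this module can import it; the
// working directory is supplied by the caller):
//
//	func listModules(ctx context.Context, dir string) (*bytes.Buffer, error) {
//		inv := &gocommand.Invocation{
//			Verb:       "list",
//			Args:       []string{"-m", "-json", "all"},
//			Env:        os.Environ(),
//			WorkingDir: dir,
//		}
//		// Run returns stdout plus an error that already folds in stderr.
//		return inv.Run(ctx)
//	}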
diff --git a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
index 9a61bdbf5d..64309db74c 100644
--- a/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
+++ b/vendor/golang.org/x/tools/internal/gopathwalk/walk.go
@@ -77,6 +77,7 @@ func WalkSkip(roots []Root, add func(root Root, dir string), skip func(root Root
}
}
+// walkDir creates a walker and starts fastwalk with this walker.
func walkDir(root Root, add func(Root, string), skip func(root Root, dir string) bool, opts Options) {
if _, err := os.Stat(root.Path); os.IsNotExist(err) {
if opts.Debug {
@@ -114,7 +115,7 @@ type walker struct {
ignoredDirs []os.FileInfo // The ignored directories, loaded from .goimportsignore files.
}
-// init initializes the walker based on its Options.
+// init initializes the walker based on its Options
func (w *walker) init() {
var ignoredPaths []string
if w.root.Type == RootModuleCache {
@@ -167,6 +168,7 @@ func (w *walker) getIgnoredDirs(path string) []string {
return ignoredDirs
}
+// shouldSkipDir reports whether the directory should be skipped.
func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
for _, ignoredDir := range w.ignoredDirs {
if os.SameFile(fi, ignoredDir) {
@@ -180,20 +182,21 @@ func (w *walker) shouldSkipDir(fi os.FileInfo, dir string) bool {
return false
}
+// walk walks through the given path.
func (w *walker) walk(path string, typ os.FileMode) error {
dir := filepath.Dir(path)
if typ.IsRegular() {
if dir == w.root.Path && (w.root.Type == RootGOROOT || w.root.Type == RootGOPATH) {
// Doesn't make sense to have regular files
// directly in your $GOPATH/src or $GOROOT/src.
- return fastwalk.SkipFiles
+ return fastwalk.ErrSkipFiles
}
if !strings.HasSuffix(path, ".go") {
return nil
}
w.add(w.root, dir)
- return fastwalk.SkipFiles
+ return fastwalk.ErrSkipFiles
}
if typ == os.ModeDir {
base := filepath.Base(path)
@@ -221,7 +224,7 @@ func (w *walker) walk(path string, typ os.FileMode) error {
return nil
}
if w.shouldTraverse(dir, fi) {
- return fastwalk.TraverseLink
+ return fastwalk.ErrTraverseLink
}
}
return nil
diff --git a/vendor/golang.org/x/tools/internal/imports/fix.go b/vendor/golang.org/x/tools/internal/imports/fix.go
index f531024da9..5e0c9dff03 100644
--- a/vendor/golang.org/x/tools/internal/imports/fix.go
+++ b/vendor/golang.org/x/tools/internal/imports/fix.go
@@ -14,7 +14,6 @@ import (
"go/token"
"io/ioutil"
"os"
- "os/exec"
"path"
"path/filepath"
"reflect"
@@ -22,12 +21,11 @@ import (
"strconv"
"strings"
"sync"
- "time"
"unicode"
"unicode/utf8"
"golang.org/x/tools/go/ast/astutil"
- "golang.org/x/tools/go/packages"
+ "golang.org/x/tools/internal/gocommand"
"golang.org/x/tools/internal/gopathwalk"
)
@@ -82,7 +80,8 @@ type ImportFix struct {
// IdentName is the identifier that this fix will add or remove.
IdentName string
// FixType is the type of fix this is (AddImport, DeleteImport, SetImportName).
- FixType ImportFixType
+ FixType ImportFixType
+ Relevance int // see pkg
}
// An ImportInfo represents a single import statement.
@@ -537,7 +536,7 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
// derive package names from import paths, see if the file is already
// complete. We can't add any imports yet, because we don't know
// if missing references are actually package vars.
- p := &pass{fset: fset, f: f, srcDir: srcDir}
+ p := &pass{fset: fset, f: f, srcDir: srcDir, env: env}
if fixes, done := p.load(); done {
return fixes, nil
}
@@ -559,8 +558,7 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
}
// Third pass: get real package names where we had previously used
- // the naive algorithm. This is the first step that will use the
- // environment, so we provide it here for the first time.
+ // the naive algorithm.
p = &pass{fset: fset, f: f, srcDir: srcDir, env: env}
p.loadRealPackageNames = true
p.otherFiles = otherFiles
@@ -585,62 +583,86 @@ func getFixes(fset *token.FileSet, f *ast.File, filename string, env *ProcessEnv
return fixes, nil
}
-// getCandidatePkgs returns the list of pkgs that are accessible from filename,
-// optionall filtered to only packages named pkgName.
-func getCandidatePkgs(pkgName, filename string, env *ProcessEnv) ([]*pkg, error) {
- // TODO(heschi): filter out current package. (Don't forget x_test can import x.)
+// Highest relevance, used for the standard library. Chosen arbitrarily to
+// match pre-existing gopls code.
+const MaxRelevance = 7
- var result []*pkg
+// getCandidatePkgs works with the passed callback to find all acceptable packages.
+// It deduplicates by import path, and uses a cached stdlib rather than reading
+// from disk.
+func getCandidatePkgs(ctx context.Context, wrappedCallback *scanCallback, filename, filePkg string, env *ProcessEnv) error {
+ notSelf := func(p *pkg) bool {
+ return p.packageName != filePkg || p.dir != filepath.Dir(filename)
+ }
// Start off with the standard library.
- for importPath := range stdlib {
- if pkgName != "" && path.Base(importPath) != pkgName {
- continue
- }
- result = append(result, &pkg{
+ for importPath, exports := range stdlib {
+ p := &pkg{
dir: filepath.Join(env.GOROOT, "src", importPath),
importPathShort: importPath,
packageName: path.Base(importPath),
- relevance: 0,
- })
- }
-
- // Exclude goroot results -- getting them is relatively expensive, not cached,
- // and generally redundant with the in-memory version.
- exclude := []gopathwalk.RootType{gopathwalk.RootGOROOT}
- // Only the go/packages resolver uses the first argument, and nobody uses that resolver.
- scannedPkgs, err := env.GetResolver().scan(nil, true, exclude)
- if err != nil {
- return nil, err
+ relevance: MaxRelevance,
+ }
+ if notSelf(p) && wrappedCallback.packageNameLoaded(p) {
+ wrappedCallback.exportsLoaded(p, exports)
+ }
}
+ var mu sync.Mutex
dupCheck := map[string]struct{}{}
- for _, pkg := range scannedPkgs {
- if pkgName != "" && pkg.packageName != pkgName {
- continue
- }
- if !canUse(filename, pkg.dir) {
- continue
- }
- if _, ok := dupCheck[pkg.importPathShort]; ok {
- continue
- }
- dupCheck[pkg.importPathShort] = struct{}{}
- result = append(result, pkg)
+
+ scanFilter := &scanCallback{
+ rootFound: func(root gopathwalk.Root) bool {
+ // Exclude goroot results -- getting them is relatively expensive, not cached,
+ // and generally redundant with the in-memory version.
+ return root.Type != gopathwalk.RootGOROOT && wrappedCallback.rootFound(root)
+ },
+ dirFound: wrappedCallback.dirFound,
+ packageNameLoaded: func(pkg *pkg) bool {
+ mu.Lock()
+ defer mu.Unlock()
+ if _, ok := dupCheck[pkg.importPathShort]; ok {
+ return false
+ }
+ dupCheck[pkg.importPathShort] = struct{}{}
+ return notSelf(pkg) && wrappedCallback.packageNameLoaded(pkg)
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ // If we're an x_test, load the package under test's test variant.
+ if strings.HasSuffix(filePkg, "_test") && pkg.dir == filepath.Dir(filename) {
+ var err error
+ _, exports, err = loadExportsFromFiles(ctx, env, pkg.dir, true)
+ if err != nil {
+ return
+ }
+ }
+ wrappedCallback.exportsLoaded(pkg, exports)
+ },
}
+ return env.GetResolver().scan(ctx, scanFilter)
+}
- // Sort first by relevance, then by package name, with import path as a tiebreaker.
- sort.Slice(result, func(i, j int) bool {
- pi, pj := result[i], result[j]
- if pi.relevance != pj.relevance {
- return pi.relevance < pj.relevance
- }
- if pi.packageName != pj.packageName {
- return pi.packageName < pj.packageName
- }
- return pi.importPathShort < pj.importPathShort
- })
+func ScoreImportPaths(ctx context.Context, env *ProcessEnv, paths []string) map[string]int {
+ result := make(map[string]int)
+ for _, path := range paths {
+ result[path] = env.GetResolver().scoreImportPath(ctx, path)
+ }
+ return result
+}
- return result, nil
+func PrimeCache(ctx context.Context, env *ProcessEnv) error {
+ // Fully scan the disk for directories, but don't actually read any Go files.
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return false
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return false
+ },
+ }
+ return getCandidatePkgs(ctx, callback, "", "", env)
}
func candidateImportName(pkg *pkg) string {
@@ -651,23 +673,37 @@ func candidateImportName(pkg *pkg) string {
}
// getAllCandidates gets all of the candidates to be imported, regardless of if they are needed.
-func getAllCandidates(filename string, env *ProcessEnv) ([]ImportFix, error) {
- pkgs, err := getCandidatePkgs("", filename, env)
- if err != nil {
- return nil, err
- }
- result := make([]ImportFix, 0, len(pkgs))
- for _, pkg := range pkgs {
- result = append(result, ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- })
+func getAllCandidates(ctx context.Context, wrapped func(ImportFix), searchPrefix, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ // Try the assumed package name first, then a simpler path match
+ // in case of packages named vN, which are not uncommon.
+ return strings.HasPrefix(ImportPathToAssumedName(pkg.importPathShort), searchPrefix) ||
+ strings.HasPrefix(path.Base(pkg.importPathShort), searchPrefix)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if !strings.HasPrefix(pkg.packageName, searchPrefix) {
+ return false
+ }
+ wrapped(ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ })
+ return false
+ },
}
- return result, nil
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// A PackageExport is a package and its exports.
@@ -676,42 +712,34 @@ type PackageExport struct {
Exports []string
}
-func getPackageExports(completePackage, filename string, env *ProcessEnv) ([]PackageExport, error) {
- pkgs, err := getCandidatePkgs(completePackage, filename, env)
- if err != nil {
- return nil, err
- }
-
- results := make([]PackageExport, 0, len(pkgs))
- for _, pkg := range pkgs {
- fix := &ImportFix{
- StmtInfo: ImportInfo{
- ImportPath: pkg.importPathShort,
- Name: candidateImportName(pkg),
- },
- IdentName: pkg.packageName,
- FixType: AddImport,
- }
- var exports []string
- if e, ok := stdlib[pkg.importPathShort]; ok {
- exports = e
- } else {
- exports, err = loadExportsForPackage(context.Background(), env, completePackage, pkg)
- if err != nil {
- if env.Debug {
- env.Logf("while completing %q, error loading exports from %q: %v", completePackage, pkg.importPathShort, err)
- }
- continue
- }
- }
- sort.Strings(exports)
- results = append(results, PackageExport{
- Fix: fix,
- Exports: exports,
- })
+func getPackageExports(ctx context.Context, wrapped func(PackageExport), searchPkg, filename, filePkg string, env *ProcessEnv) error {
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, references{searchPkg: nil}, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ return pkg.packageName == searchPkg
+ },
+ exportsLoaded: func(pkg *pkg, exports []string) {
+ sort.Strings(exports)
+ wrapped(PackageExport{
+ Fix: &ImportFix{
+ StmtInfo: ImportInfo{
+ ImportPath: pkg.importPathShort,
+ Name: candidateImportName(pkg),
+ },
+ IdentName: pkg.packageName,
+ FixType: AddImport,
+ Relevance: pkg.relevance,
+ },
+ Exports: exports,
+ })
+ },
}
-
- return results, nil
+ return getCandidatePkgs(ctx, callback, filename, filePkg, env)
}
// ProcessEnv contains environment variables and settings that affect the use of
@@ -720,20 +748,26 @@ type ProcessEnv struct {
LocalPrefix string
Debug bool
+ BuildFlags []string
+
// If non-empty, these will be used instead of the
// process-wide values.
GOPATH, GOROOT, GO111MODULE, GOPROXY, GOFLAGS, GOSUMDB string
WorkingDir string
- // If true, use go/packages regardless of the environment.
- ForceGoPackages bool
-
// Logf is the default logger for the ProcessEnv.
Logf func(format string, args ...interface{})
resolver Resolver
}
+// CopyConfig copies the env's configuration into a new env.
+func (e *ProcessEnv) CopyConfig() *ProcessEnv {
+ copy := *e
+ copy.resolver = nil
+ return &copy
+}
+
func (e *ProcessEnv) env() []string {
env := os.Environ()
add := func(k, v string) {
@@ -757,73 +791,55 @@ func (e *ProcessEnv) GetResolver() Resolver {
if e.resolver != nil {
return e.resolver
}
- if e.ForceGoPackages {
- e.resolver = &goPackagesResolver{env: e}
- return e.resolver
- }
-
- out, err := e.invokeGo("env", "GOMOD")
+ out, err := e.invokeGo(context.TODO(), "env", "GOMOD")
if err != nil || len(bytes.TrimSpace(out.Bytes())) == 0 {
- e.resolver = &gopathResolver{env: e}
+ e.resolver = newGopathResolver(e)
return e.resolver
}
- e.resolver = &ModuleResolver{env: e}
+ e.resolver = newModuleResolver(e)
return e.resolver
}
-func (e *ProcessEnv) newPackagesConfig(mode packages.LoadMode) *packages.Config {
- return &packages.Config{
- Mode: mode,
- Dir: e.WorkingDir,
- Env: e.env(),
- }
-}
-
func (e *ProcessEnv) buildContext() *build.Context {
ctx := build.Default
ctx.GOROOT = e.GOROOT
ctx.GOPATH = e.GOPATH
- // As of Go 1.14, build.Context has a WorkingDir field
+ // As of Go 1.14, build.Context has a Dir field
// (see golang.org/issue/34860).
// Populate it only if present.
- if wd := reflect.ValueOf(&ctx).Elem().FieldByName("WorkingDir"); wd.IsValid() && wd.Kind() == reflect.String {
- wd.SetString(e.WorkingDir)
+ rc := reflect.ValueOf(&ctx).Elem()
+ dir := rc.FieldByName("Dir")
+ if !dir.IsValid() {
+ // Working drafts of Go 1.14 named the field "WorkingDir" instead.
+ // TODO(bcmills): Remove this case after the Go 1.14 beta has been released.
+ dir = rc.FieldByName("WorkingDir")
}
- return &ctx
-}
-
-func (e *ProcessEnv) invokeGo(args ...string) (*bytes.Buffer, error) {
- cmd := exec.Command("go", args...)
- stdout := &bytes.Buffer{}
- stderr := &bytes.Buffer{}
- cmd.Stdout = stdout
- cmd.Stderr = stderr
- cmd.Env = e.env()
- cmd.Dir = e.WorkingDir
-
- if e.Debug {
- defer func(start time.Time) { e.Logf("%s for %v", time.Since(start), cmdDebugStr(cmd)) }(time.Now())
+ if dir.IsValid() && dir.Kind() == reflect.String {
+ dir.SetString(e.WorkingDir)
}
- if err := cmd.Run(); err != nil {
- return nil, fmt.Errorf("running go: %v (stderr:\n%s)", err, stderr)
- }
- return stdout, nil
+
+ return &ctx
}
-func cmdDebugStr(cmd *exec.Cmd) string {
- env := make(map[string]string)
- for _, kv := range cmd.Env {
- split := strings.Split(kv, "=")
- k, v := split[0], split[1]
- env[k] = v
+func (e *ProcessEnv) invokeGo(ctx context.Context, verb string, args ...string) (*bytes.Buffer, error) {
+ inv := gocommand.Invocation{
+ Verb: verb,
+ Args: args,
+ BuildFlags: e.BuildFlags,
+ Env: e.env(),
+ Logf: e.Logf,
+ WorkingDir: e.WorkingDir,
}
-
- return fmt.Sprintf("GOROOT=%v GOPATH=%v GO111MODULE=%v GOPROXY=%v PWD=%v go %v", env["GOROOT"], env["GOPATH"], env["GO111MODULE"], env["GOPROXY"], env["PWD"], cmd.Args)
+ return inv.Run(ctx)
}
func addStdlibCandidates(pass *pass, refs references) {
add := func(pkg string) {
+ // Prevent self-imports.
+ if path.Base(pkg) == pass.f.Name.Name && filepath.Join(pass.env.GOROOT, "src", pkg) == pass.srcDir {
+ return
+ }
exports := copyExports(stdlib[pkg])
pass.addCandidate(
&ImportInfo{ImportPath: pkg},
@@ -848,94 +864,65 @@ func addStdlibCandidates(pass *pass, refs references) {
type Resolver interface {
// loadPackageNames loads the package names in importPaths.
loadPackageNames(importPaths []string, srcDir string) (map[string]string, error)
- // scan finds (at least) the packages satisfying refs. If loadNames is true,
- // package names will be set on the results, and dirs whose package name
- // could not be determined will be excluded.
- scan(refs references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error)
+ // scan works with callback to search for packages. See scanCallback for details.
+ scan(ctx context.Context, callback *scanCallback) error
// loadExports returns the set of exported symbols in the package at dir.
// loadExports may be called concurrently.
- loadExports(ctx context.Context, pkg *pkg) (string, []string, error)
+ loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error)
+ // scoreImportPath returns the relevance for an import path.
+ scoreImportPath(ctx context.Context, path string) int
ClearForNewScan()
}
-// gopackagesResolver implements resolver for GOPATH and module workspaces using go/packages.
-type goPackagesResolver struct {
- env *ProcessEnv
+// A scanCallback controls a call to scan and receives its results.
+// In general, minor errors will be silently discarded; a user should not
+// expect to receive a full series of calls for everything.
+type scanCallback struct {
+ // rootFound is called before scanning a new root dir. If it returns true,
+ // the root will be scanned. Returning false will not necessarily prevent
+ // directories from that root making it to dirFound.
+ rootFound func(gopathwalk.Root) bool
+ // dirFound is called when a directory is found that is possibly a Go package.
+ // pkg will be populated with everything except packageName.
+ // If it returns true, the package's name will be loaded.
+ dirFound func(pkg *pkg) bool
+ // packageNameLoaded is called when a package is found and its name is loaded.
+ // If it returns true, the package's exports will be loaded.
+ packageNameLoaded func(pkg *pkg) bool
+ // exportsLoaded is called when a package's exports have been loaded.
+ exportsLoaded func(pkg *pkg, exports []string)
}
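
For context, a minimal sketch (not part of this change) of how the new scanCallback protocol is driven: rootFound filters roots, dirFound asks for a package name, and packageNameLoaded decides whether exports get loaded. The resolver value and the names slice are hypothetical, and the snippet assumes it sits inside this internal imports package.

var names []string
callback := &scanCallback{
	rootFound: func(root gopathwalk.Root) bool {
		return root.Type == gopathwalk.RootGOPATH // only scan GOPATH roots
	},
	dirFound: func(p *pkg) bool {
		return true // load the package name for every candidate directory
	},
	packageNameLoaded: func(p *pkg) bool {
		names = append(names, p.packageName)
		return false // stop here; exports are not needed
	},
}
if err := resolver.scan(context.Background(), callback); err != nil {
	// a cancelled context or resolver failure ends up here
}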
-func (r *goPackagesResolver) ClearForNewScan() {}
-
-func (r *goPackagesResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- if len(importPaths) == 0 {
- return nil, nil
- }
- cfg := r.env.newPackagesConfig(packages.LoadFiles)
- pkgs, err := packages.Load(cfg, importPaths...)
- if err != nil {
- return nil, err
- }
- names := map[string]string{}
- for _, pkg := range pkgs {
- names[VendorlessPath(pkg.PkgPath)] = pkg.Name
- }
- // We may not have found all the packages. Guess the rest.
- for _, path := range importPaths {
- if _, ok := names[path]; ok {
- continue
- }
- names[path] = ImportPathToAssumedName(path)
- }
- return names, nil
-
-}
-
-func (r *goPackagesResolver) scan(refs references, _ bool, _ []gopathwalk.RootType) ([]*pkg, error) {
- var loadQueries []string
- for pkgName := range refs {
- loadQueries = append(loadQueries, "iamashamedtousethedisabledqueryname="+pkgName)
- }
- sort.Strings(loadQueries)
- cfg := r.env.newPackagesConfig(packages.LoadFiles)
- goPackages, err := packages.Load(cfg, loadQueries...)
- if err != nil {
- return nil, err
- }
-
- var scan []*pkg
- for _, goPackage := range goPackages {
- scan = append(scan, &pkg{
- dir: filepath.Dir(goPackage.CompiledGoFiles[0]),
- importPathShort: VendorlessPath(goPackage.PkgPath),
- goPackage: goPackage,
- packageName: goPackage.Name,
- })
- }
- return scan, nil
-}
-
-func (r *goPackagesResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
- if pkg.goPackage == nil {
- return "", nil, fmt.Errorf("goPackage not set")
- }
- var exports []string
- fset := token.NewFileSet()
- for _, fname := range pkg.goPackage.CompiledGoFiles {
- f, err := parser.ParseFile(fset, fname, nil, 0)
- if err != nil {
- return "", nil, fmt.Errorf("parsing %s: %v", fname, err)
- }
- for name := range f.Scope.Objects {
- if ast.IsExported(name) {
- exports = append(exports, name)
+func addExternalCandidates(pass *pass, refs references, filename string) error {
+ var mu sync.Mutex
+ found := make(map[string][]pkgDistance)
+ callback := &scanCallback{
+ rootFound: func(gopathwalk.Root) bool {
+ return true // We want everything.
+ },
+ dirFound: func(pkg *pkg) bool {
+ return pkgIsCandidate(filename, refs, pkg)
+ },
+ packageNameLoaded: func(pkg *pkg) bool {
+ if _, want := refs[pkg.packageName]; !want {
+ return false
}
- }
+ if pkg.dir == pass.srcDir && pass.f.Name.Name == pkg.packageName {
+ // The candidate is in the same directory and has the
+ // same package name. Don't try to import ourselves.
+ return false
+ }
+ if !canUse(filename, pkg.dir) {
+ return false
+ }
+ mu.Lock()
+ defer mu.Unlock()
+ found[pkg.packageName] = append(found[pkg.packageName], pkgDistance{pkg, distance(pass.srcDir, pkg.dir)})
+ return false // We'll do our own loading after we sort.
+ },
}
- return pkg.goPackage.Name, exports, nil
-}
-
-func addExternalCandidates(pass *pass, refs references, filename string) error {
- dirScan, err := pass.env.GetResolver().scan(refs, false, nil)
+ err := pass.env.GetResolver().scan(context.Background(), callback)
if err != nil {
return err
}
@@ -962,7 +949,7 @@ func addExternalCandidates(pass *pass, refs references, filename string) error {
go func(pkgName string, symbols map[string]bool) {
defer wg.Done()
- found, err := findImport(ctx, pass, dirScan, pkgName, symbols, filename)
+ found, err := findImport(ctx, pass, found[pkgName], pkgName, symbols, filename)
if err != nil {
firstErrOnce.Do(func() {
@@ -1033,24 +1020,36 @@ func ImportPathToAssumedName(importPath string) string {
// gopathResolver implements resolver for GOPATH workspaces.
type gopathResolver struct {
- env *ProcessEnv
- cache *dirInfoCache
+ env *ProcessEnv
+ walked bool
+ cache *dirInfoCache
+ scanSema chan struct{} // scanSema prevents concurrent scans.
}
-func (r *gopathResolver) init() {
- if r.cache == nil {
- r.cache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
- }
+func newGopathResolver(env *ProcessEnv) *gopathResolver {
+ r := &gopathResolver{
+ env: env,
+ cache: &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ },
+ scanSema: make(chan struct{}, 1),
}
+ r.scanSema <- struct{}{}
+ return r
}
func (r *gopathResolver) ClearForNewScan() {
- r.cache = nil
+ <-r.scanSema
+ r.cache = &dirInfoCache{
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
+ }
+ r.walked = false
+ r.scanSema <- struct{}{}
}
func (r *gopathResolver) loadPackageNames(importPaths []string, srcDir string) (map[string]string, error) {
- r.init()
names := map[string]string{}
for _, path := range importPaths {
names[path] = importPathToName(r.env, path, srcDir)
@@ -1130,7 +1129,6 @@ func packageDirToName(dir string) (packageName string, err error) {
}
type pkg struct {
- goPackage *packages.Package
dir string // absolute file path to pkg directory ("/usr/lib/go/src/net/http")
importPathShort string // vendorless import path ("net/http", "a/b")
packageName string // package name loaded from source if requested
@@ -1178,8 +1176,7 @@ func distance(basepath, targetpath string) int {
return strings.Count(p, string(filepath.Separator)) + 1
}
-func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) {
- r.init()
+func (r *gopathResolver) scan(ctx context.Context, callback *scanCallback) error {
add := func(root gopathwalk.Root, dir string) {
// We assume cached directories have not changed. We can skip them and their
// children.
@@ -1196,56 +1193,84 @@ func (r *gopathResolver) scan(_ references, loadNames bool, exclude []gopathwalk
}
r.cache.Store(dir, info)
}
- roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), exclude)
- gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
- var result []*pkg
- for _, dir := range r.cache.Keys() {
- info, ok := r.cache.Load(dir)
- if !ok {
- continue
- }
- if loadNames {
- var err error
- info, err = r.cache.CachePackageName(info)
- if err != nil {
- continue
- }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
}
p := &pkg{
importPathShort: info.nonCanonicalImportPath,
- dir: dir,
- relevance: 1,
- packageName: info.packageName,
+ dir: info.dir,
+ relevance: MaxRelevance - 1,
}
if info.rootType == gopathwalk.RootGOROOT {
- p.relevance = 0
+ p.relevance = MaxRelevance
+ }
+
+ if !callback.dirFound(p) {
+ return
}
- result = append(result, p)
+ var err error
+ p.packageName, err = r.cache.CachePackageName(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.packageNameLoaded(p) {
+ return
+ }
+ if _, exports, err := r.loadExports(ctx, p, false); err == nil {
+ callback.exportsLoaded(p, exports)
+ }
+ }
+ stop := r.cache.ScanAndListen(ctx, processDir)
+ defer stop()
+ // The callback is not necessarily safe to use in the goroutine below. Process roots eagerly.
+ roots := filterRoots(gopathwalk.SrcDirsRoots(r.env.buildContext()), callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ gopathwalk.Walk(roots, add, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: false})
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
}
- return result, nil
+ return nil
}
-func filterRoots(roots []gopathwalk.Root, exclude []gopathwalk.RootType) []gopathwalk.Root {
+func (r *gopathResolver) scoreImportPath(ctx context.Context, path string) int {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
+ }
+ return MaxRelevance - 1
+}
+
+func filterRoots(roots []gopathwalk.Root, include func(gopathwalk.Root) bool) []gopathwalk.Root {
var result []gopathwalk.Root
-outer:
for _, root := range roots {
- for _, i := range exclude {
- if i == root.Type {
- continue outer
- }
+ if !include(root) {
+ continue
}
result = append(result, root)
}
return result
}
-func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
- r.init()
- if info, ok := r.cache.Load(pkg.dir); ok {
+func (r *gopathResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
+ if info, ok := r.cache.Load(pkg.dir); ok && !includeTest {
return r.cache.CacheExports(ctx, r.env, info)
}
- return loadExportsFromFiles(ctx, r.env, pkg.dir)
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
// VendorlessPath returns the devendorized version of the import path ipath.
@@ -1261,7 +1286,7 @@ func VendorlessPath(ipath string) string {
return ipath
}
-func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (string, []string, error) {
+func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string, includeTest bool) (string, []string, error) {
var exports []string
// Look for non-test, buildable .go files which could provide exports.
@@ -1272,7 +1297,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
var files []os.FileInfo
for _, fi := range all {
name := fi.Name()
- if !strings.HasSuffix(name, ".go") || strings.HasSuffix(name, "_test.go") {
+ if !strings.HasSuffix(name, ".go") || (!includeTest && strings.HasSuffix(name, "_test.go")) {
continue
}
match, err := env.buildContext().MatchFile(dir, fi.Name())
@@ -1305,6 +1330,10 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// handled by MatchFile above.
continue
}
+ if includeTest && strings.HasSuffix(f.Name.Name, "_test") {
+ // x_test package. We want internal test files only.
+ continue
+ }
pkgName = f.Name.Name
for name := range f.Scope.Objects {
if ast.IsExported(name) {
@@ -1323,29 +1352,7 @@ func loadExportsFromFiles(ctx context.Context, env *ProcessEnv, dir string) (str
// findImport searches for a package with the given symbols.
// If no package is found, findImport returns ("", false, nil)
-func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
- pkgDir, err := filepath.Abs(filename)
- if err != nil {
- return nil, err
- }
- pkgDir = filepath.Dir(pkgDir)
-
- // Find candidate packages, looking only at their directory names first.
- var candidates []pkgDistance
- for _, pkg := range dirScan {
- if pkg.dir == pkgDir && pass.f.Name.Name == pkgName {
- // The candidate is in the same directory and has the
- // same package name. Don't try to import ourselves.
- continue
- }
- if pkgIsCandidate(filename, pkgName, pkg) {
- candidates = append(candidates, pkgDistance{
- pkg: pkg,
- distance: distance(pkgDir, pkg.dir),
- })
- }
- }
-
+func findImport(ctx context.Context, pass *pass, candidates []pkgDistance, pkgName string, symbols map[string]bool, filename string) (*pkg, error) {
// Sort the candidates by their import package length,
// assuming that shorter package names are better than long
// ones. Note that this sorts by the de-vendored name, so
@@ -1358,7 +1365,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
}
// Collect exports for packages with matching names.
-
rescv := make([]chan *pkg, len(candidates))
for i := range candidates {
rescv[i] = make(chan *pkg, 1)
@@ -1393,7 +1399,9 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s)", c.pkg.dir, pkgName)
}
- exports, err := loadExportsForPackage(ctx, pass.env, pkgName, c.pkg)
+ // If we're an x_test, load the package under test's test variant.
+ includeTest := strings.HasSuffix(pass.f.Name.Name, "_test") && c.pkg.dir == pass.srcDir
+ _, exports, err := pass.env.GetResolver().loadExports(ctx, c.pkg, includeTest)
if err != nil {
if pass.env.Debug {
pass.env.Logf("loading exports in dir %s (seeking package %s): %v", c.pkg.dir, pkgName, err)
@@ -1430,17 +1438,6 @@ func findImport(ctx context.Context, pass *pass, dirScan []*pkg, pkgName string,
return nil, nil
}
-func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg string, pkg *pkg) ([]string, error) {
- pkgName, exports, err := env.GetResolver().loadExports(ctx, pkg)
- if err != nil {
- return nil, err
- }
- if expectPkg != pkgName {
- return nil, fmt.Errorf("dir %v is package %v, wanted %v", pkg.dir, pkgName, expectPkg)
- }
- return exports, err
-}
-
// pkgIsCandidate reports whether pkg is a candidate for satisfying the
// finding which package pkgIdent in the file named by filename is trying
// to refer to.
@@ -1453,7 +1450,7 @@ func loadExportsForPackage(ctx context.Context, env *ProcessEnv, expectPkg strin
// filename is the file being formatted.
// pkgIdent is the package being searched for, like "client" (if
// searching for "client.New")
-func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
+func pkgIsCandidate(filename string, refs references, pkg *pkg) bool {
// Check "internal" and "vendor" visibility:
if !canUse(filename, pkg.dir) {
return false
@@ -1471,17 +1468,18 @@ func pkgIsCandidate(filename, pkgIdent string, pkg *pkg) bool {
// "bar", which is strongly discouraged
// anyway. There's no reason goimports needs
// to be slow just to accommodate that.
- lastTwo := lastTwoComponents(pkg.importPathShort)
- if strings.Contains(lastTwo, pkgIdent) {
- return true
- }
- if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
- lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ for pkgIdent := range refs {
+ lastTwo := lastTwoComponents(pkg.importPathShort)
if strings.Contains(lastTwo, pkgIdent) {
return true
}
+ if hasHyphenOrUpperASCII(lastTwo) && !hasHyphenOrUpperASCII(pkgIdent) {
+ lastTwo = lowerASCIIAndRemoveHyphen(lastTwo)
+ if strings.Contains(lastTwo, pkgIdent) {
+ return true
+ }
+ }
}
-
return false
}
diff --git a/vendor/golang.org/x/tools/internal/imports/imports.go b/vendor/golang.org/x/tools/internal/imports/imports.go
index e066d909e4..2e7a317e55 100644
--- a/vendor/golang.org/x/tools/internal/imports/imports.go
+++ b/vendor/golang.org/x/tools/internal/imports/imports.go
@@ -11,6 +11,7 @@ package imports
import (
"bufio"
"bytes"
+ "context"
"fmt"
"go/ast"
"go/build"
@@ -21,6 +22,7 @@ import (
"io"
"io/ioutil"
"log"
+ "os"
"regexp"
"strconv"
"strings"
@@ -114,23 +116,23 @@ func ApplyFixes(fixes []*ImportFix, filename string, src []byte, opt *Options, e
return formatFile(fileSet, file, src, nil, opt)
}
-// GetAllCandidates gets all of the standard library candidate packages to import in
-// sorted order on import path.
-func GetAllCandidates(filename string, opt *Options) (pkgs []ImportFix, err error) {
- _, opt, err = initialize(filename, nil, opt)
+// GetAllCandidates gets all of the packages starting with prefix that can be
+// imported by filename, sorted by import path.
+func GetAllCandidates(ctx context.Context, callback func(ImportFix), searchPrefix, filename, filePkg string, opt *Options) error {
+ _, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
- return nil, err
+ return err
}
- return getAllCandidates(filename, opt.Env)
+ return getAllCandidates(ctx, callback, searchPrefix, filename, filePkg, opt.Env)
}
// GetPackageExports returns all known packages with name pkg and their exports.
-func GetPackageExports(pkg, filename string, opt *Options) (exports []PackageExport, err error) {
- _, opt, err = initialize(filename, nil, opt)
+func GetPackageExports(ctx context.Context, callback func(PackageExport), searchPkg, filename, filePkg string, opt *Options) error {
+ _, opt, err := initialize(filename, []byte{}, opt)
if err != nil {
- return nil, err
+ return err
}
- return getPackageExports(pkg, filename, opt.Env)
+ return getPackageExports(ctx, callback, searchPkg, filename, filePkg, opt.Env)
}
// initialize sets the values for opt and src.
@@ -145,8 +147,12 @@ func initialize(filename string, src []byte, opt *Options) ([]byte, *Options, er
// Set the env if the user has not provided it.
if opt.Env == nil {
opt.Env = &ProcessEnv{
- GOPATH: build.Default.GOPATH,
- GOROOT: build.Default.GOROOT,
+ GOPATH: build.Default.GOPATH,
+ GOROOT: build.Default.GOROOT,
+ GOFLAGS: os.Getenv("GOFLAGS"),
+ GO111MODULE: os.Getenv("GO111MODULE"),
+ GOPROXY: os.Getenv("GOPROXY"),
+ GOSUMDB: os.Getenv("GOSUMDB"),
}
}
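
A hedged sketch of driving the reworked, callback-based API above. Because this package is internal to x/tools, such a caller lives inside that module (gopls uses it this way); the filename, package name, and empty search prefix below are illustrative.

var fixes []ImportFix
err := GetAllCandidates(context.Background(), func(fix ImportFix) {
	fixes = append(fixes, fix) // candidates stream in as they are found
}, "", "/path/to/main.go", "main", &Options{})
if err != nil {
	// handle the error
}
// fixes now holds AddImport suggestions, each carrying a Relevance score.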
diff --git a/vendor/golang.org/x/tools/internal/imports/mod.go b/vendor/golang.org/x/tools/internal/imports/mod.go
index 0f9b87eb73..28d4b1ff33 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod.go
@@ -13,11 +13,10 @@ import (
"sort"
"strconv"
"strings"
- "sync"
+ "golang.org/x/mod/module"
+ "golang.org/x/mod/semver"
"golang.org/x/tools/internal/gopathwalk"
- "golang.org/x/tools/internal/module"
- "golang.org/x/tools/internal/semver"
)
// ModuleResolver implements resolver for modules using the go command as little
@@ -26,11 +25,14 @@ type ModuleResolver struct {
env *ProcessEnv
moduleCacheDir string
dummyVendorMod *ModuleJSON // If vendoring is enabled, the pseudo-module that represents the /vendor directory.
+ roots []gopathwalk.Root
+ scanSema chan struct{} // scanSema prevents concurrent scans and guards scannedRoots.
+ scannedRoots map[gopathwalk.Root]bool
- Initialized bool
- Main *ModuleJSON
- ModsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
- ModsByDir []*ModuleJSON // ...or Dir.
+ initialized bool
+ main *ModuleJSON
+ modsByModPath []*ModuleJSON // All modules, ordered by # of path components in module Path...
+ modsByDir []*ModuleJSON // ...or Dir.
// moduleCacheCache stores information about the module cache.
moduleCacheCache *dirInfoCache
@@ -41,13 +43,23 @@ type ModuleJSON struct {
Path string // module path
Replace *ModuleJSON // replaced by this module
Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
Dir string // directory holding files for this module, if any
GoMod string // path to go.mod file for this module, if any
GoVersion string // go version used in module
}
+func newModuleResolver(e *ProcessEnv) *ModuleResolver {
+ r := &ModuleResolver{
+ env: e,
+ scanSema: make(chan struct{}, 1),
+ }
+ r.scanSema <- struct{}{}
+ return r
+}
+
func (r *ModuleResolver) init() error {
- if r.Initialized {
+ if r.initialized {
return nil
}
mainMod, vendorEnabled, err := vendorEnabled(r.env)
@@ -58,13 +70,13 @@ func (r *ModuleResolver) init() error {
if mainMod != nil && vendorEnabled {
// Vendor mode is on, so all the non-Main modules are irrelevant,
// and we need to search /vendor for everything.
- r.Main = mainMod
+ r.main = mainMod
r.dummyVendorMod = &ModuleJSON{
Path: "",
Dir: filepath.Join(mainMod.Dir, "vendor"),
}
- r.ModsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
- r.ModsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
+ r.modsByModPath = []*ModuleJSON{mainMod, r.dummyVendorMod}
+ r.modsByDir = []*ModuleJSON{mainMod, r.dummyVendorMod}
} else {
// Vendor mode is off, so run go list -m ... to find everything.
r.initAllMods()
@@ -72,35 +84,69 @@ func (r *ModuleResolver) init() error {
r.moduleCacheDir = filepath.Join(filepath.SplitList(r.env.GOPATH)[0], "/pkg/mod")
- sort.Slice(r.ModsByModPath, func(i, j int) bool {
+ sort.Slice(r.modsByModPath, func(i, j int) bool {
count := func(x int) int {
- return strings.Count(r.ModsByModPath[x].Path, "/")
+ return strings.Count(r.modsByModPath[x].Path, "/")
}
return count(j) < count(i) // descending order
})
- sort.Slice(r.ModsByDir, func(i, j int) bool {
+ sort.Slice(r.modsByDir, func(i, j int) bool {
count := func(x int) int {
- return strings.Count(r.ModsByDir[x].Dir, "/")
+ return strings.Count(r.modsByDir[x].Dir, "/")
}
return count(j) < count(i) // descending order
})
+ r.roots = []gopathwalk.Root{
+ {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
+ }
+ if r.main != nil {
+ r.roots = append(r.roots, gopathwalk.Root{r.main.Dir, gopathwalk.RootCurrentModule})
+ }
+ if vendorEnabled {
+ r.roots = append(r.roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
+ } else {
+ addDep := func(mod *ModuleJSON) {
+ if mod.Replace == nil {
+ // This is redundant with the cache, but we'll skip it cheaply enough.
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootModuleCache})
+ } else {
+ r.roots = append(r.roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
+ }
+ }
+ // Walk dependent modules before scanning the full mod cache, direct deps first.
+ for _, mod := range r.modsByModPath {
+ if !mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ for _, mod := range r.modsByModPath {
+ if mod.Indirect && !mod.Main {
+ addDep(mod)
+ }
+ }
+ r.roots = append(r.roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
+ }
+
+ r.scannedRoots = map[gopathwalk.Root]bool{}
if r.moduleCacheCache == nil {
r.moduleCacheCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
}
if r.otherCache == nil {
r.otherCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
}
- r.Initialized = true
+ r.initialized = true
return nil
}
func (r *ModuleResolver) initAllMods() error {
- stdout, err := r.env.invokeGo("list", "-m", "-json", "...")
+ stdout, err := r.env.invokeGo(context.TODO(), "list", "-m", "-json", "...")
if err != nil {
return err
}
@@ -116,27 +162,37 @@ func (r *ModuleResolver) initAllMods() error {
// Can't do anything with a module that's not downloaded.
continue
}
- r.ModsByModPath = append(r.ModsByModPath, mod)
- r.ModsByDir = append(r.ModsByDir, mod)
+ // golang/go#36193: the go command doesn't always clean paths.
+ mod.Dir = filepath.Clean(mod.Dir)
+ r.modsByModPath = append(r.modsByModPath, mod)
+ r.modsByDir = append(r.modsByDir, mod)
if mod.Main {
- r.Main = mod
+ r.main = mod
}
}
return nil
}
func (r *ModuleResolver) ClearForNewScan() {
+ <-r.scanSema
+ r.scannedRoots = map[gopathwalk.Root]bool{}
r.otherCache = &dirInfoCache{
- dirs: map[string]*directoryPackageInfo{},
+ dirs: map[string]*directoryPackageInfo{},
+ listeners: map[*int]cacheListener{},
}
+ r.scanSema <- struct{}{}
}
func (r *ModuleResolver) ClearForNewMod() {
- env := r.env
+ <-r.scanSema
*r = ModuleResolver{
- env: env,
+ env: r.env,
+ moduleCacheCache: r.moduleCacheCache,
+ otherCache: r.otherCache,
+ scanSema: r.scanSema,
}
r.init()
+ r.scanSema <- struct{}{}
}
// findPackage returns the module and directory that contains the package at
@@ -144,7 +200,7 @@ func (r *ModuleResolver) ClearForNewMod() {
func (r *ModuleResolver) findPackage(importPath string) (*ModuleJSON, string) {
// This can't find packages in the stdlib, but that's harmless for all
// the existing code paths.
- for _, m := range r.ModsByModPath {
+ for _, m := range r.modsByModPath {
if !strings.HasPrefix(importPath, m.Path) {
continue
}
@@ -211,7 +267,7 @@ func (r *ModuleResolver) cacheKeys() []string {
}
// cachePackageName caches the package name for a dir already in the cache.
-func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) {
+func (r *ModuleResolver) cachePackageName(info directoryPackageInfo) (string, error) {
if info.rootType == gopathwalk.RootModuleCache {
return r.moduleCacheCache.CachePackageName(info)
}
@@ -238,7 +294,7 @@ func (r *ModuleResolver) findModuleByDir(dir string) *ModuleJSON {
// - in /vendor/ in -mod=vendor mode.
// - nested module? Dunno.
// Rumor has it that replace targets cannot contain other replace targets.
- for _, m := range r.ModsByDir {
+ for _, m := range r.modsByDir {
if !strings.HasPrefix(dir, m.Dir) {
continue
}
@@ -333,41 +389,49 @@ func (r *ModuleResolver) loadPackageNames(importPaths []string, srcDir string) (
return names, nil
}
-func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk.RootType) ([]*pkg, error) {
+func (r *ModuleResolver) scan(ctx context.Context, callback *scanCallback) error {
if err := r.init(); err != nil {
- return nil, err
+ return err
}
- // Walk GOROOT, GOPATH/pkg/mod, and the main module.
- roots := []gopathwalk.Root{
- {filepath.Join(r.env.GOROOT, "/src"), gopathwalk.RootGOROOT},
- }
- if r.Main != nil {
- roots = append(roots, gopathwalk.Root{r.Main.Dir, gopathwalk.RootCurrentModule})
- }
- if r.dummyVendorMod != nil {
- roots = append(roots, gopathwalk.Root{r.dummyVendorMod.Dir, gopathwalk.RootOther})
- } else {
- roots = append(roots, gopathwalk.Root{r.moduleCacheDir, gopathwalk.RootModuleCache})
- // Walk replace targets, just in case they're not in any of the above.
- for _, mod := range r.ModsByModPath {
- if mod.Replace != nil {
- roots = append(roots, gopathwalk.Root{mod.Dir, gopathwalk.RootOther})
- }
+ processDir := func(info directoryPackageInfo) {
+ // Skip this directory if we were not able to get the package information successfully.
+ if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
+ return
+ }
+ pkg, err := r.canonicalize(info)
+ if err != nil {
+ return
+ }
+
+ if !callback.dirFound(pkg) {
+ return
+ }
+ pkg.packageName, err = r.cachePackageName(info)
+ if err != nil {
+ return
}
- }
- roots = filterRoots(roots, exclude)
+ if !callback.packageNameLoaded(pkg) {
+ return
+ }
+ _, exports, err := r.loadExports(ctx, pkg, false)
+ if err != nil {
+ return
+ }
+ callback.exportsLoaded(pkg, exports)
+ }
- var result []*pkg
- var mu sync.Mutex
+ // Start processing everything in the cache, and listen for the new stuff
+ // we discover in the walk below.
+ stop1 := r.moduleCacheCache.ScanAndListen(ctx, processDir)
+ defer stop1()
+ stop2 := r.otherCache.ScanAndListen(ctx, processDir)
+ defer stop2()
- // We assume cached directories have not changed. We can skip them and their
- // children.
+ // We assume cached directories are fully cached, including all their
+ // children, and have not changed. We can skip them.
skip := func(root gopathwalk.Root, dir string) bool {
- mu.Lock()
- defer mu.Unlock()
-
info, ok := r.cacheLoad(dir)
if !ok {
return false
@@ -379,44 +443,64 @@ func (r *ModuleResolver) scan(_ references, loadNames bool, exclude []gopathwalk
return packageScanned
}
- // Add anything new to the cache. We'll process everything in it below.
+ // Add anything new to the cache, and process it if we're still listening.
add := func(root gopathwalk.Root, dir string) {
- mu.Lock()
- defer mu.Unlock()
-
r.cacheStore(r.scanDirForPackage(root, dir))
}
- gopathwalk.WalkSkip(roots, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
-
- // Everything we already had, and everything new, is now in the cache.
- for _, dir := range r.cacheKeys() {
- info, ok := r.cacheLoad(dir)
- if !ok {
- continue
- }
-
- // Skip this directory if we were not able to get the package information successfully.
- if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- continue
- }
+ // r.roots and the callback are not necessarily safe to use in the
+ // goroutine below. Process them eagerly.
+ roots := filterRoots(r.roots, callback.rootFound)
+ // We can't cancel walks, because we need them to finish to have a usable
+ // cache. Instead, run them in a separate goroutine and detach.
+ scanDone := make(chan struct{})
+ go func() {
+ select {
+ case <-ctx.Done():
+ return
+ case <-r.scanSema:
+ }
+ defer func() { r.scanSema <- struct{}{} }()
+ // We have the lock on r.scannedRoots, and no other scans can run.
+ for _, root := range roots {
+ if ctx.Err() != nil {
+ return
+ }
- // If we want package names, make sure the cache has them.
- if loadNames {
- var err error
- if info, err = r.cachePackageName(info); err != nil {
+ if r.scannedRoots[root] {
continue
}
+ gopathwalk.WalkSkip([]gopathwalk.Root{root}, add, skip, gopathwalk.Options{Debug: r.env.Debug, ModulesEnabled: true})
+ r.scannedRoots[root] = true
}
+ close(scanDone)
+ }()
+ select {
+ case <-ctx.Done():
+ case <-scanDone:
+ }
+ return nil
+}
- res, err := r.canonicalize(info)
- if err != nil {
- continue
- }
- result = append(result, res)
+func (r *ModuleResolver) scoreImportPath(ctx context.Context, path string) int {
+ if _, ok := stdlib[path]; ok {
+ return MaxRelevance
}
+ mod, _ := r.findPackage(path)
+ return modRelevance(mod)
+}
- return result, nil
+func modRelevance(mod *ModuleJSON) int {
+ switch {
+ case mod == nil: // out of scope
+ return MaxRelevance - 4
+ case mod.Indirect:
+ return MaxRelevance - 3
+ case !mod.Main:
+ return MaxRelevance - 2
+ default:
+ return MaxRelevance - 1 // main module ties with stdlib
+ }
}
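
The relevance tiers above are surfaced on ImportFix via the Relevance field set in the callbacks earlier in this diff, so a consumer can rank suggestions. A small sketch, assuming a fixes slice collected with GetAllCandidates and the standard sort package:

sort.Slice(fixes, func(i, j int) bool {
	if fixes[i].Relevance != fixes[j].Relevance {
		return fixes[i].Relevance > fixes[j].Relevance // stdlib and main module first
	}
	return fixes[i].StmtInfo.ImportPath < fixes[j].StmtInfo.ImportPath
})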
// canonicalize gets the result of canonicalizing the packages using the results
@@ -428,15 +512,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
importPathShort: info.nonCanonicalImportPath,
dir: info.dir,
packageName: path.Base(info.nonCanonicalImportPath),
- relevance: 0,
+ relevance: MaxRelevance,
}, nil
}
importPath := info.nonCanonicalImportPath
- relevance := 2
+ mod := r.findModuleByDir(info.dir)
// Check if the directory is underneath a module that's in scope.
- if mod := r.findModuleByDir(info.dir); mod != nil {
- relevance = 1
+ if mod != nil {
// It is. If dir is the target of a replace directive,
// our guessed import path is wrong. Use the real one.
if mod.Dir == info.dir {
@@ -445,15 +528,16 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
dirInMod := info.dir[len(mod.Dir)+len("/"):]
importPath = path.Join(mod.Path, filepath.ToSlash(dirInMod))
}
- } else if info.needsReplace {
+ } else if !strings.HasPrefix(importPath, info.moduleName) {
+ // The module's name doesn't match the package's import path. It
+ // probably needs a replace directive we don't have.
return nil, fmt.Errorf("package in %q is not valid without a replace statement", info.dir)
}
res := &pkg{
importPathShort: importPath,
dir: info.dir,
- packageName: info.packageName, // may not be populated if the caller didn't ask for it
- relevance: relevance,
+ relevance: modRelevance(mod),
}
// We may have discovered a package that has a different version
// in scope already. Canonicalize to that one if possible.
@@ -463,14 +547,14 @@ func (r *ModuleResolver) canonicalize(info directoryPackageInfo) (*pkg, error) {
return res, nil
}
-func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg) (string, []string, error) {
+func (r *ModuleResolver) loadExports(ctx context.Context, pkg *pkg, includeTest bool) (string, []string, error) {
if err := r.init(); err != nil {
return "", nil, err
}
- if info, ok := r.cacheLoad(pkg.dir); ok {
+ if info, ok := r.cacheLoad(pkg.dir); ok && !includeTest {
return r.cacheExports(ctx, r.env, info)
}
- return loadExportsFromFiles(ctx, r.env, pkg.dir)
+ return loadExportsFromFiles(ctx, r.env, pkg.dir, includeTest)
}
func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) directoryPackageInfo {
@@ -488,7 +572,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
}
switch root.Type {
case gopathwalk.RootCurrentModule:
- importPath = path.Join(r.Main.Path, filepath.ToSlash(subdir))
+ importPath = path.Join(r.main.Path, filepath.ToSlash(subdir))
case gopathwalk.RootModuleCache:
matches := modCacheRegexp.FindStringSubmatch(subdir)
if len(matches) == 0 {
@@ -497,7 +581,7 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
err: fmt.Errorf("invalid module cache path: %v", subdir),
}
}
- modPath, err := module.DecodePath(filepath.ToSlash(matches[1]))
+ modPath, err := module.UnescapePath(filepath.ToSlash(matches[1]))
if err != nil {
if r.env.Debug {
r.env.Logf("decoding module cache path %q: %v", subdir, err)
@@ -516,7 +600,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
dir: dir,
rootType: root.Type,
nonCanonicalImportPath: importPath,
- needsReplace: false,
moduleDir: modDir,
moduleName: modName,
}
@@ -524,14 +607,6 @@ func (r *ModuleResolver) scanDirForPackage(root gopathwalk.Root, dir string) dir
// stdlib packages are always in scope, despite the confusing go.mod
return result
}
- // Check that this package is not obviously impossible to import.
- if !strings.HasPrefix(importPath, modName) {
- // The module's declared path does not match
- // its expected path. It probably needs a
- // replace directive we don't have.
- result.needsReplace = true
- }
-
return result
}
@@ -624,7 +699,7 @@ func getMainModuleAnd114(env *ProcessEnv) (*ModuleJSON, bool, error) {
{{.GoVersion}}
{{range context.ReleaseTags}}{{if eq . "go1.14"}}{{.}}{{end}}{{end}}
`
- stdout, err := env.invokeGo("list", "-m", "-f", format)
+ stdout, err := env.invokeGo(context.TODO(), "list", "-m", "-f", format)
if err != nil {
return nil, false, nil
}
diff --git a/vendor/golang.org/x/tools/internal/imports/mod_cache.go b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
index f6b070a3f6..5b4f03accd 100644
--- a/vendor/golang.org/x/tools/internal/imports/mod_cache.go
+++ b/vendor/golang.org/x/tools/internal/imports/mod_cache.go
@@ -49,10 +49,6 @@ type directoryPackageInfo struct {
// nonCanonicalImportPath is the package's expected import path. It may
// not actually be importable at that path.
nonCanonicalImportPath string
- // needsReplace is true if the nonCanonicalImportPath does not match the
- // module's declared path, making it impossible to import without a
- // replace directive.
- needsReplace bool
// Module-related information.
moduleDir string // The directory that is the module root of this dir.
@@ -97,15 +93,86 @@ func (info *directoryPackageInfo) reachedStatus(target directoryPackageStatus) (
type dirInfoCache struct {
mu sync.Mutex
// dirs stores information about packages in directories, keyed by absolute path.
- dirs map[string]*directoryPackageInfo
+ dirs map[string]*directoryPackageInfo
+ listeners map[*int]cacheListener
+}
+
+type cacheListener func(directoryPackageInfo)
+
+// ScanAndListen calls listener on all the items in the cache, and on anything
+// newly added. The returned stop function waits for all in-flight callbacks to
+// finish and blocks new ones.
+func (d *dirInfoCache) ScanAndListen(ctx context.Context, listener cacheListener) func() {
+ ctx, cancel := context.WithCancel(ctx)
+
+ // Flushing out all the callbacks is tricky without knowing how many there
+ // are going to be. Setting an arbitrary limit makes it much easier.
+ const maxInFlight = 10
+ sema := make(chan struct{}, maxInFlight)
+ for i := 0; i < maxInFlight; i++ {
+ sema <- struct{}{}
+ }
+
+ cookie := new(int) // A unique ID we can use for the listener.
+
+ // We can't hold mu while calling the listener.
+ d.mu.Lock()
+ var keys []string
+ for key := range d.dirs {
+ keys = append(keys, key)
+ }
+ d.listeners[cookie] = func(info directoryPackageInfo) {
+ select {
+ case <-ctx.Done():
+ return
+ case <-sema:
+ }
+ listener(info)
+ sema <- struct{}{}
+ }
+ d.mu.Unlock()
+
+ stop := func() {
+ cancel()
+ d.mu.Lock()
+ delete(d.listeners, cookie)
+ d.mu.Unlock()
+ for i := 0; i < maxInFlight; i++ {
+ <-sema
+ }
+ }
+
+ // Process the pre-existing keys.
+ for _, k := range keys {
+ select {
+ case <-ctx.Done():
+ return stop
+ default:
+ }
+ if v, ok := d.Load(k); ok {
+ listener(v)
+ }
+ }
+
+ return stop
}
// Store stores the package info for dir.
func (d *dirInfoCache) Store(dir string, info directoryPackageInfo) {
d.mu.Lock()
- defer d.mu.Unlock()
- stored := info // defensive copy
- d.dirs[dir] = &stored
+ _, old := d.dirs[dir]
+ d.dirs[dir] = &info
+ var listeners []cacheListener
+ for _, l := range d.listeners {
+ listeners = append(listeners, l)
+ }
+ d.mu.Unlock()
+
+ if !old {
+ for _, l := range listeners {
+ l(info)
+ }
+ }
}
// Load returns a copy of the directoryPackageInfo for absolute directory dir.
@@ -129,17 +196,17 @@ func (d *dirInfoCache) Keys() (keys []string) {
return keys
}
-func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (directoryPackageInfo, error) {
+func (d *dirInfoCache) CachePackageName(info directoryPackageInfo) (string, error) {
if loaded, err := info.reachedStatus(nameLoaded); loaded {
- return info, err
+ return info.packageName, err
}
if scanned, err := info.reachedStatus(directoryScanned); !scanned || err != nil {
- return info, fmt.Errorf("cannot read package name, scan error: %v", err)
+ return "", fmt.Errorf("cannot read package name, scan error: %v", err)
}
info.packageName, info.err = packageDirToName(info.dir)
info.status = nameLoaded
d.Store(info.dir, info)
- return info, info.err
+ return info.packageName, info.err
}
func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info directoryPackageInfo) (string, []string, error) {
@@ -149,8 +216,8 @@ func (d *dirInfoCache) CacheExports(ctx context.Context, env *ProcessEnv, info d
if reached, err := info.reachedStatus(nameLoaded); reached && err != nil {
return "", nil, err
}
- info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir)
- if info.err == context.Canceled {
+ info.packageName, info.exports, info.err = loadExportsFromFiles(ctx, env, info.dir, false)
+ if info.err == context.Canceled || info.err == context.DeadlineExceeded {
return info.packageName, info.exports, info.err
}
// The cache structure wants things to proceed linearly. We can skip a
diff --git a/vendor/golang.org/x/tools/internal/packagesinternal/packages.go b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
new file mode 100644
index 0000000000..b13ce33a39
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/packagesinternal/packages.go
@@ -0,0 +1,27 @@
+// Package packagesinternal exposes internal-only fields from go/packages.
+package packagesinternal
+
+import "time"
+
+// Fields must match go list;
+type Module struct {
+ Path string // module path
+ Version string // module version
+ Versions []string // available module versions (with -versions)
+ Replace *Module // replaced by this module
+ Time *time.Time // time version was created
+ Update *Module // available update, if any (with -u)
+ Main bool // is this the main module?
+ Indirect bool // is this module only an indirect dependency of main module?
+ Dir string // directory holding files for this module, if any
+ GoMod string // path to go.mod file used when loading this module, if any
+ GoVersion string // go version used in module
+ Error *ModuleError // error loading module
+}
+type ModuleError struct {
+ Err string // the error itself
+}
+
+var GetForTest = func(p interface{}) string { return "" }
+
+var GetModule = func(p interface{}) *Module { return nil }
diff --git a/vendor/golang.org/x/xerrors/LICENSE b/vendor/golang.org/x/xerrors/LICENSE
new file mode 100644
index 0000000000..e4a47e17f1
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2019 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/vendor/golang.org/x/xerrors/PATENTS b/vendor/golang.org/x/xerrors/PATENTS
new file mode 100644
index 0000000000..733099041f
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/PATENTS
@@ -0,0 +1,22 @@
+Additional IP Rights Grant (Patents)
+
+"This implementation" means the copyrightable works distributed by
+Google as part of the Go project.
+
+Google hereby grants to You a perpetual, worldwide, non-exclusive,
+no-charge, royalty-free, irrevocable (except as stated in this section)
+patent license to make, have made, use, offer to sell, sell, import,
+transfer and otherwise run, modify and propagate the contents of this
+implementation of Go, where such license applies only to those patent
+claims, both currently owned or controlled by Google and acquired in
+the future, licensable by Google that are necessarily infringed by this
+implementation of Go. This grant does not include claims that would be
+infringed only as a consequence of further modification of this
+implementation. If you or your agent or exclusive licensee institute or
+order or agree to the institution of patent litigation against any
+entity (including a cross-claim or counterclaim in a lawsuit) alleging
+that this implementation of Go or any code incorporated within this
+implementation of Go constitutes direct or contributory patent
+infringement, or inducement of patent infringement, then any patent
+rights granted to you under this License for this implementation of Go
+shall terminate as of the date such litigation is filed.
diff --git a/vendor/golang.org/x/xerrors/README b/vendor/golang.org/x/xerrors/README
new file mode 100644
index 0000000000..aac7867a56
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/README
@@ -0,0 +1,2 @@
+This repository holds the transition packages for the new Go 1.13 error values.
+See golang.org/design/29934-error-values.
diff --git a/vendor/golang.org/x/xerrors/adaptor.go b/vendor/golang.org/x/xerrors/adaptor.go
new file mode 100644
index 0000000000..4317f24833
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/adaptor.go
@@ -0,0 +1,193 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "reflect"
+ "strconv"
+)
+
+// FormatError calls the FormatError method of f with an errors.Printer
+// configured according to s and verb, and writes the result to s.
+func FormatError(f Formatter, s fmt.State, verb rune) {
+ // Assuming this function is only called from the Format method, and given
+ // that FormatError takes precedence over Format, it cannot be called from
+ // any package that supports errors.Formatter. It is therefore safe to
+ // disregard that State may be a specific printer implementation and use one
+ // of our choice instead.
+
+ // limitations: does not support printing error as Go struct.
+
+ var (
+ sep = " " // separator before next error
+ p = &state{State: s}
+ direct = true
+ )
+
+ var err error = f
+
+ switch verb {
+ // Note that this switch must match the preference order
+ // for ordinary string printing (%#v before %+v, and so on).
+
+ case 'v':
+ if s.Flag('#') {
+ if stringer, ok := err.(fmt.GoStringer); ok {
+ io.WriteString(&p.buf, stringer.GoString())
+ goto exit
+ }
+ // proceed as if it were %v
+ } else if s.Flag('+') {
+ p.printDetail = true
+ sep = "\n - "
+ }
+ case 's':
+ case 'q', 'x', 'X':
+ // Use an intermediate buffer in the rare cases that precision,
+ // truncation, or one of the alternative verbs (q, x, and X) are
+ // specified.
+ direct = false
+
+ default:
+ p.buf.WriteString("%!")
+ p.buf.WriteRune(verb)
+ p.buf.WriteByte('(')
+ switch {
+ case err != nil:
+ p.buf.WriteString(reflect.TypeOf(f).String())
+ default:
+ p.buf.WriteString("<nil>")
+ }
+ p.buf.WriteByte(')')
+ io.Copy(s, &p.buf)
+ return
+ }
+
+loop:
+ for {
+ switch v := err.(type) {
+ case Formatter:
+ err = v.FormatError((*printer)(p))
+ case fmt.Formatter:
+ v.Format(p, 'v')
+ break loop
+ default:
+ io.WriteString(&p.buf, v.Error())
+ break loop
+ }
+ if err == nil {
+ break
+ }
+ if p.needColon || !p.printDetail {
+ p.buf.WriteByte(':')
+ p.needColon = false
+ }
+ p.buf.WriteString(sep)
+ p.inDetail = false
+ p.needNewline = false
+ }
+
+exit:
+ width, okW := s.Width()
+ prec, okP := s.Precision()
+
+ if !direct || (okW && width > 0) || okP {
+ // Construct format string from State s.
+ format := []byte{'%'}
+ if s.Flag('-') {
+ format = append(format, '-')
+ }
+ if s.Flag('+') {
+ format = append(format, '+')
+ }
+ if s.Flag(' ') {
+ format = append(format, ' ')
+ }
+ if okW {
+ format = strconv.AppendInt(format, int64(width), 10)
+ }
+ if okP {
+ format = append(format, '.')
+ format = strconv.AppendInt(format, int64(prec), 10)
+ }
+ format = append(format, string(verb)...)
+ fmt.Fprintf(s, string(format), p.buf.String())
+ } else {
+ io.Copy(s, &p.buf)
+ }
+}
+
+var detailSep = []byte("\n ")
+
+// state tracks error printing state. It implements fmt.State.
+type state struct {
+ fmt.State
+ buf bytes.Buffer
+
+ printDetail bool
+ inDetail bool
+ needColon bool
+ needNewline bool
+}
+
+func (s *state) Write(b []byte) (n int, err error) {
+ if s.printDetail {
+ if len(b) == 0 {
+ return 0, nil
+ }
+ if s.inDetail && s.needColon {
+ s.needNewline = true
+ if b[0] == '\n' {
+ b = b[1:]
+ }
+ }
+ k := 0
+ for i, c := range b {
+ if s.needNewline {
+ if s.inDetail && s.needColon {
+ s.buf.WriteByte(':')
+ s.needColon = false
+ }
+ s.buf.Write(detailSep)
+ s.needNewline = false
+ }
+ if c == '\n' {
+ s.buf.Write(b[k:i])
+ k = i + 1
+ s.needNewline = true
+ }
+ }
+ s.buf.Write(b[k:])
+ if !s.inDetail {
+ s.needColon = true
+ }
+ } else if !s.inDetail {
+ s.buf.Write(b)
+ }
+ return len(b), nil
+}
+
+// printer wraps a state to implement an xerrors.Printer.
+type printer state
+
+func (s *printer) Print(args ...interface{}) {
+ if !s.inDetail || s.printDetail {
+ fmt.Fprint((*state)(s), args...)
+ }
+}
+
+func (s *printer) Printf(format string, args ...interface{}) {
+ if !s.inDetail || s.printDetail {
+ fmt.Fprintf((*state)(s), format, args...)
+ }
+}
+
+func (s *printer) Detail() bool {
+ s.inDetail = true
+ return s.printDetail
+}
diff --git a/vendor/golang.org/x/xerrors/codereview.cfg b/vendor/golang.org/x/xerrors/codereview.cfg
new file mode 100644
index 0000000000..3f8b14b64e
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/codereview.cfg
@@ -0,0 +1 @@
+issuerepo: golang/go
diff --git a/vendor/golang.org/x/xerrors/doc.go b/vendor/golang.org/x/xerrors/doc.go
new file mode 100644
index 0000000000..eef99d9d54
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/doc.go
@@ -0,0 +1,22 @@
+// Copyright 2019 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package xerrors implements functions to manipulate errors.
+//
+// This package is based on the Go 2 proposal for error values:
+// https://golang.org/design/29934-error-values
+//
+// These functions were incorporated into the standard library's errors package
+// in Go 1.13:
+// - Is
+// - As
+// - Unwrap
+//
+// Also, Errorf's %w verb was incorporated into fmt.Errorf.
+//
+// Use this package to get equivalent behavior in all supported Go versions.
+//
+// No other features of this package were included in Go 1.13, and at present
+// there are no plans to include any of them.
+package xerrors // import "golang.org/x/xerrors"
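
A minimal, self-contained usage sketch for the vendored package documented above, showing the %w wrapping and Is inspection it backports to pre-1.13 Go:

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

func main() {
	base := xerrors.New("connection refused")
	err := xerrors.Errorf("dialing cache: %w", base)
	if xerrors.Is(err, base) {
		fmt.Println(err) // dialing cache: connection refused
	}
	fmt.Printf("%+v\n", err) // with detail: includes caller file/line frames
}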
diff --git a/vendor/golang.org/x/xerrors/errors.go b/vendor/golang.org/x/xerrors/errors.go
new file mode 100644
index 0000000000..e88d3772d8
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/errors.go
@@ -0,0 +1,33 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import "fmt"
+
+// errorString is a trivial implementation of error.
+type errorString struct {
+ s string
+ frame Frame
+}
+
+// New returns an error that formats as the given text.
+//
+// The returned error contains a Frame set to the caller's location and
+// implements Formatter to show this information when printed with details.
+func New(text string) error {
+ return &errorString{text, Caller(1)}
+}
+
+func (e *errorString) Error() string {
+ return e.s
+}
+
+func (e *errorString) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *errorString) FormatError(p Printer) (next error) {
+ p.Print(e.s)
+ e.frame.Format(p)
+ return nil
+}
diff --git a/vendor/golang.org/x/xerrors/fmt.go b/vendor/golang.org/x/xerrors/fmt.go
new file mode 100644
index 0000000000..74c1c93ec9
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/fmt.go
@@ -0,0 +1,109 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "fmt"
+ "strings"
+
+ "golang.org/x/xerrors/internal"
+)
+
+// Errorf formats according to a format specifier and returns the string as a
+// value that satisfies error.
+//
+// The returned error includes the file and line number of the caller when
+// formatted with additional detail enabled. If the last argument is an error
+// the returned error's Format method will return it if the format string ends
+// with ": %s", ": %v", or ": %w". If the last argument is an error and the
+// format string ends with ": %w", the returned error implements Wrapper
+// with an Unwrap method returning it.
+func Errorf(format string, a ...interface{}) error {
+ err, wrap := lastError(format, a)
+ format = formatPlusW(format)
+ if err == nil {
+ return &noWrapError{fmt.Sprintf(format, a...), nil, Caller(1)}
+ }
+
+ // TODO: this is not entirely correct. The error value could be
+ // printed elsewhere in format if it mixes numbered with unnumbered
+ // substitutions. With relatively small changes to doPrintf we can
+ // have it optionally ignore extra arguments and pass the argument
+ // list in its entirety.
+ msg := fmt.Sprintf(format[:len(format)-len(": %s")], a[:len(a)-1]...)
+ frame := Frame{}
+ if internal.EnableTrace {
+ frame = Caller(1)
+ }
+ if wrap {
+ return &wrapError{msg, err, frame}
+ }
+ return &noWrapError{msg, err, frame}
+}
+
+// formatPlusW is used to avoid the vet check that will barf at %w.
+func formatPlusW(s string) string {
+ return s
+}
+
+func lastError(format string, a []interface{}) (err error, wrap bool) {
+ wrap = strings.HasSuffix(format, ": %w")
+ if !wrap &&
+ !strings.HasSuffix(format, ": %s") &&
+ !strings.HasSuffix(format, ": %v") {
+ return nil, false
+ }
+
+ if len(a) == 0 {
+ return nil, false
+ }
+
+ err, ok := a[len(a)-1].(error)
+ if !ok {
+ return nil, false
+ }
+
+ return err, wrap
+}
+
+type noWrapError struct {
+ msg string
+ err error
+ frame Frame
+}
+
+func (e *noWrapError) Error() string {
+ return fmt.Sprint(e)
+}
+
+func (e *noWrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *noWrapError) FormatError(p Printer) (next error) {
+ p.Print(e.msg)
+ e.frame.Format(p)
+ return e.err
+}
+
+type wrapError struct {
+ msg string
+ err error
+ frame Frame
+}
+
+func (e *wrapError) Error() string {
+ return fmt.Sprint(e)
+}
+
+func (e *wrapError) Format(s fmt.State, v rune) { FormatError(e, s, v) }
+
+func (e *wrapError) FormatError(p Printer) (next error) {
+ p.Print(e.msg)
+ e.frame.Format(p)
+ return e.err
+}
+
+func (e *wrapError) Unwrap() error {
+ return e.err
+}
diff --git a/vendor/golang.org/x/xerrors/format.go b/vendor/golang.org/x/xerrors/format.go
new file mode 100644
index 0000000000..1bc9c26b97
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/format.go
@@ -0,0 +1,34 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+// A Formatter formats error messages.
+type Formatter interface {
+ error
+
+ // FormatError prints the receiver's first error and returns the next error in
+ // the error chain, if any.
+ FormatError(p Printer) (next error)
+}
+
+// A Printer formats error messages.
+//
+// The most common implementation of Printer is the one provided by package fmt
+// during Printf (as of Go 1.13). Localization packages such as golang.org/x/text/message
+// typically provide their own implementations.
+type Printer interface {
+ // Print appends args to the message output.
+ Print(args ...interface{})
+
+ // Printf writes a formatted string.
+ Printf(format string, args ...interface{})
+
+ // Detail reports whether error detail is requested.
+ // After the first call to Detail, all text written to the Printer
+ // is formatted as additional detail, or ignored when
+ // detail has not been requested.
+ // If Detail returns false, the caller can avoid printing the detail at all.
+ Detail() bool
+}
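
To make the Formatter/Printer contract concrete, here is a sketch of a hypothetical error type (queryError is introduced only for this example) that prints a one-line message by default and emits the offending query only when the Printer reports that detail was requested, e.g. under %+v.

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

// queryError is a hypothetical example type, not part of the vendored package.
type queryError struct {
	query string
	err   error
}

func (e *queryError) Error() string { return fmt.Sprint(e) }

func (e *queryError) Format(s fmt.State, v rune) { xerrors.FormatError(e, s, v) }

func (e *queryError) FormatError(p xerrors.Printer) (next error) {
	p.Print("query failed")
	if p.Detail() {
		p.Printf("query: %s\n", e.query)
	}
	return e.err // hand the wrapped error back to the formatter
}

func main() {
	err := &queryError{query: "SELECT 1", err: xerrors.New("connection reset")}
	fmt.Printf("%v\n", err)  // query failed: connection reset
	fmt.Printf("%+v\n", err) // adds the query and the frame recorded by xerrors.New
}
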
diff --git a/vendor/golang.org/x/xerrors/frame.go b/vendor/golang.org/x/xerrors/frame.go
new file mode 100644
index 0000000000..0de628ec50
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/frame.go
@@ -0,0 +1,56 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "runtime"
+)
+
+// A Frame contains part of a call stack.
+type Frame struct {
+ // Make room for three PCs: the one we were asked for, what it called,
+ // and possibly a PC for skipPleaseUseCallersFrames. See:
+ // https://go.googlesource.com/go/+/032678e0fb/src/runtime/extern.go#169
+ frames [3]uintptr
+}
+
+// Caller returns a Frame that describes a frame on the caller's stack.
+// The argument skip is the number of frames to skip over.
+// Caller(0) returns the frame for the caller of Caller.
+func Caller(skip int) Frame {
+ var s Frame
+ runtime.Callers(skip+1, s.frames[:])
+ return s
+}
+
+// location reports the file, line, and function of a frame.
+//
+// The returned function may be "" even if file and line are not.
+func (f Frame) location() (function, file string, line int) {
+ frames := runtime.CallersFrames(f.frames[:])
+ if _, ok := frames.Next(); !ok {
+ return "", "", 0
+ }
+ fr, ok := frames.Next()
+ if !ok {
+ return "", "", 0
+ }
+ return fr.Function, fr.File, fr.Line
+}
+
+// Format prints the stack as error detail.
+// It should be called from an error's Format implementation
+// after printing any other error detail.
+func (f Frame) Format(p Printer) {
+ if p.Detail() {
+ function, file, line := f.location()
+ if function != "" {
+ p.Printf("%s\n ", function)
+ }
+ if file != "" {
+ p.Printf("%s:%d\n", file, line)
+ }
+ }
+}
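
A sketch of the Caller/Frame pattern used by this package's own error types (tracedError and newTraced are hypothetical names): the constructor records its caller with skip=1 so the frame points at the call site rather than at the constructor, and Frame.Format only prints it when detail is requested.

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

// tracedError and newTraced are illustrative only, not part of xerrors.
type tracedError struct {
	msg   string
	frame xerrors.Frame
}

// newTraced records the location of its caller (skip=1), not of newTraced itself.
func newTraced(msg string) error {
	return &tracedError{msg: msg, frame: xerrors.Caller(1)}
}

func (e *tracedError) Error() string              { return e.msg }
func (e *tracedError) Format(s fmt.State, v rune) { xerrors.FormatError(e, s, v) }

func (e *tracedError) FormatError(p xerrors.Printer) (next error) {
	p.Print(e.msg)
	e.frame.Format(p) // prints the function and file:line only when detail is requested
	return nil
}

func main() {
	fmt.Printf("%+v\n", newTraced("disk full")) // the frame points at this line in main
}
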
diff --git a/vendor/golang.org/x/xerrors/go.mod b/vendor/golang.org/x/xerrors/go.mod
new file mode 100644
index 0000000000..870d4f612d
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/go.mod
@@ -0,0 +1,3 @@
+module golang.org/x/xerrors
+
+go 1.11
diff --git a/vendor/golang.org/x/xerrors/internal/internal.go b/vendor/golang.org/x/xerrors/internal/internal.go
new file mode 100644
index 0000000000..89f4eca5df
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/internal/internal.go
@@ -0,0 +1,8 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package internal
+
+// EnableTrace indicates whether stack information should be recorded in errors.
+var EnableTrace = true
diff --git a/vendor/golang.org/x/xerrors/wrap.go b/vendor/golang.org/x/xerrors/wrap.go
new file mode 100644
index 0000000000..9a3b510374
--- /dev/null
+++ b/vendor/golang.org/x/xerrors/wrap.go
@@ -0,0 +1,106 @@
+// Copyright 2018 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package xerrors
+
+import (
+ "reflect"
+)
+
+// A Wrapper provides context around another error.
+type Wrapper interface {
+ // Unwrap returns the next error in the error chain.
+ // If there is no next error, Unwrap returns nil.
+ Unwrap() error
+}
+
+// Opaque returns an error with the same error formatting as err
+// but that does not match err and cannot be unwrapped.
+func Opaque(err error) error {
+ return noWrapper{err}
+}
+
+type noWrapper struct {
+ error
+}
+
+func (e noWrapper) FormatError(p Printer) (next error) {
+ if f, ok := e.error.(Formatter); ok {
+ return f.FormatError(p)
+ }
+ p.Print(e.error)
+ return nil
+}
+
+// Unwrap returns the result of calling the Unwrap method on err, if err implements
+// Unwrap. Otherwise, Unwrap returns nil.
+func Unwrap(err error) error {
+ u, ok := err.(Wrapper)
+ if !ok {
+ return nil
+ }
+ return u.Unwrap()
+}
+
+// Is reports whether any error in err's chain matches target.
+//
+// An error is considered to match a target if it is equal to that target or if
+// it implements a method Is(error) bool such that Is(target) returns true.
+func Is(err, target error) bool {
+ if target == nil {
+ return err == target
+ }
+
+ isComparable := reflect.TypeOf(target).Comparable()
+ for {
+ if isComparable && err == target {
+ return true
+ }
+ if x, ok := err.(interface{ Is(error) bool }); ok && x.Is(target) {
+ return true
+ }
+ // TODO: consider supporting target.Is(err). This would allow
+ // user-definable predicates, but also may allow for coping with sloppy
+ // APIs, thereby making it easier to get away with them.
+ if err = Unwrap(err); err == nil {
+ return false
+ }
+ }
+}
+
+// As finds the first error in err's chain that matches the type to which target
+// points, and if so, sets the target to its value and returns true. An error
+// matches a type if it is assignable to the target type, or if it has a method
+// As(interface{}) bool such that As(target) returns true. As will panic if target
+// is not a non-nil pointer to a type which implements error or is of interface type.
+//
+// The As method should set the target to its value and return true if err
+// matches the type to which target points.
+func As(err error, target interface{}) bool {
+ if target == nil {
+ panic("errors: target cannot be nil")
+ }
+ val := reflect.ValueOf(target)
+ typ := val.Type()
+ if typ.Kind() != reflect.Ptr || val.IsNil() {
+ panic("errors: target must be a non-nil pointer")
+ }
+ if e := typ.Elem(); e.Kind() != reflect.Interface && !e.Implements(errorType) {
+ panic("errors: *target must be interface or implement error")
+ }
+ targetType := typ.Elem()
+ for err != nil {
+ if reflect.TypeOf(err).AssignableTo(targetType) {
+ val.Elem().Set(reflect.ValueOf(err))
+ return true
+ }
+ if x, ok := err.(interface{ As(interface{}) bool }); ok && x.As(target) {
+ return true
+ }
+ err = Unwrap(err)
+ }
+ return false
+}
+
+var errorType = reflect.TypeOf((*error)(nil)).Elem()
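
Finally, a sketch tying Unwrap, Is, As and Opaque together (notFoundError is a hypothetical type introduced only for the example):

package main

import (
	"fmt"

	"golang.org/x/xerrors"
)

// notFoundError is a hypothetical error type used only for this example.
type notFoundError struct{ key string }

func (e *notFoundError) Error() string { return "not found: " + e.key }

func main() {
	cause := &notFoundError{key: "user/42"}
	wrapped := xerrors.Errorf("loading profile: %w", cause)

	// Is walks the chain through Unwrap until it finds a match.
	fmt.Println(xerrors.Is(wrapped, cause)) // true

	// As copies the first error in the chain assignable to *notFoundError.
	var nf *notFoundError
	if xerrors.As(wrapped, &nf) {
		fmt.Println(nf.key) // user/42
	}

	// Opaque keeps the error's formatting but severs the chain:
	// it no longer matches and cannot be unwrapped.
	sealed := xerrors.Opaque(wrapped)
	fmt.Println(xerrors.Is(sealed, cause))     // false
	fmt.Println(xerrors.Unwrap(sealed) == nil) // true
}
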
diff --git a/vendor/modules.txt b/vendor/modules.txt
index 98ee2f9b12..fd2108874f 100644
--- a/vendor/modules.txt
+++ b/vendor/modules.txt
@@ -46,6 +46,7 @@ gitea.com/macaron/session/redis
## explicit
gitea.com/macaron/toolbox
# github.com/BurntSushi/toml v0.3.1
+## explicit
github.com/BurntSushi/toml
# github.com/PuerkitoBio/goquery v1.5.0
## explicit
@@ -184,6 +185,10 @@ github.com/ethantkoenig/rupture
## explicit
# github.com/facebookgo/subset v0.0.0-20150612182917-8dac2c3c4870
## explicit
+# github.com/fatih/color v1.9.0
+github.com/fatih/color
+# github.com/fatih/structtag v1.2.0
+github.com/fatih/structtag
# github.com/fsnotify/fsnotify v1.4.7
github.com/fsnotify/fsnotify
# github.com/gliderlabs/ssh v0.2.2
@@ -445,11 +450,15 @@ github.com/markbates/goth/providers/nextcloud
github.com/markbates/goth/providers/openidConnect
github.com/markbates/goth/providers/twitter
github.com/markbates/goth/providers/yandex
-# github.com/mattn/go-isatty v0.0.7
+# github.com/mattn/go-colorable v0.1.4
+github.com/mattn/go-colorable
+# github.com/mattn/go-isatty v0.0.11
## explicit
github.com/mattn/go-isatty
# github.com/mattn/go-oci8 v0.0.0-20190320171441-14ba190cf52d
## explicit
+# github.com/mattn/go-runewidth v0.0.7
+github.com/mattn/go-runewidth
# github.com/mattn/go-sqlite3 v1.11.0
## explicit
github.com/mattn/go-sqlite3
@@ -458,10 +467,19 @@ github.com/matttproud/golang_protobuf_extensions/pbutil
# github.com/mcuadros/go-version v0.0.0-20190308113854-92cdf37c5b75
## explicit
github.com/mcuadros/go-version
+# github.com/mgechev/dots v0.0.0-20190921121421-c36f7dcfbb81
+## explicit
+github.com/mgechev/dots
+# github.com/mgechev/revive v1.0.2
+## explicit
+github.com/mgechev/revive/formatter
+github.com/mgechev/revive/lint
+github.com/mgechev/revive/rule
# github.com/microcosm-cc/bluemonday v0.0.0-20161012083705-f77f16ffc87a
## explicit
github.com/microcosm-cc/bluemonday
# github.com/mitchellh/go-homedir v1.1.0
+## explicit
github.com/mitchellh/go-homedir
# github.com/mitchellh/mapstructure v1.1.2
github.com/mitchellh/mapstructure
@@ -478,6 +496,8 @@ github.com/nfnt/resize
# github.com/niklasfasching/go-org v0.1.9
## explicit
github.com/niklasfasching/go-org/org
+# github.com/olekukonko/tablewriter v0.0.4
+github.com/olekukonko/tablewriter
# github.com/oliamb/cutter v0.2.2
## explicit
github.com/oliamb/cutter
@@ -490,7 +510,7 @@ github.com/olivere/elastic/v7/uritemplates
github.com/pelletier/go-toml
# github.com/philhofer/fwd v1.0.0
github.com/philhofer/fwd
-# github.com/pkg/errors v0.8.1
+# github.com/pkg/errors v0.9.1
## explicit
github.com/pkg/errors
# github.com/pmezard/go-difflib v1.0.0
@@ -661,6 +681,9 @@ golang.org/x/crypto/ssh
golang.org/x/crypto/ssh/agent
golang.org/x/crypto/ssh/internal/bcrypt_pbkdf
golang.org/x/crypto/ssh/knownhosts
+# golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee
+golang.org/x/mod/module
+golang.org/x/mod/semver
# golang.org/x/net v0.0.0-20200301022130-244492dfa37a
## explicit
golang.org/x/net/context
@@ -708,8 +731,7 @@ golang.org/x/text/transform
golang.org/x/text/unicode/bidi
golang.org/x/text/unicode/norm
golang.org/x/text/width
-# golang.org/x/tools v0.0.0-20191213221258-04c2e8eff935
-## explicit
+# golang.org/x/tools v0.0.0-20200225230052-807dcd883420
golang.org/x/tools/go/ast/astutil
golang.org/x/tools/go/buildutil
golang.org/x/tools/go/gcexportdata
@@ -720,10 +742,13 @@ golang.org/x/tools/go/loader
golang.org/x/tools/go/packages
golang.org/x/tools/imports
golang.org/x/tools/internal/fastwalk
+golang.org/x/tools/internal/gocommand
golang.org/x/tools/internal/gopathwalk
golang.org/x/tools/internal/imports
-golang.org/x/tools/internal/module
-golang.org/x/tools/internal/semver
+golang.org/x/tools/internal/packagesinternal
+# golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898
+golang.org/x/xerrors
+golang.org/x/xerrors/internal
# google.golang.org/appengine v1.6.4
google.golang.org/appengine
google.golang.org/appengine/cloudsql