Add golangci-lint (#502)

Initial part of #435
Lukas authored on 2021-11-14 21:01:54 +01:00, committed by GitHub
parent ca8e215cfa
commit c28f7cb29f
1662 changed files with 298368 additions and 14 deletions

10
.golangci.yml Normal file

@@ -0,0 +1,10 @@
linters-settings:
gofmt:
simplify: false
linters:
disable-all: true
enable:
- gofmt
- goimports
- govet
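
For illustration only (a hypothetical snippet, not part of this commit), the kind of findings the linters enabled above produce: govet's printf check flags the mismatched verb, and gofmt (run with simplify disabled, as configured) flags the stray whitespace.

```go
package main

import "fmt"

func main() {
	// govet (printf check): %d does not match the string argument.
	fmt.Printf("pipeline %d finished\n", "clone")

	// gofmt: the doubled space after := would be rewritten by `gofmt -w`.
	count :=  1
	fmt.Println(count)
}
```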


@@ -63,9 +63,7 @@ pipeline:
group: test
commands:
- make test
- make vet
- make lint
- make formatcheck
test-postgres:
image: golang:1.16


@@ -25,9 +25,6 @@ vendor:
go mod tidy
go mod vendor
formatcheck:
@([ -z "$(shell gofmt -d $(GOFILES_NOVENDOR) | head)" ]) || (echo "Source is unformatted"; exit 1)
format:
@gofmt -w ${GOFILES_NOVENDOR}
@@ -36,13 +33,10 @@ clean:
go clean -i ./...
rm -rf build
.PHONY: vet
vet:
@echo "Running go vet..."
@go vet $(GO_PACKAGES)
.PHONY: lint
lint:
@echo "Running golangci-lint"
go run vendor/github.com/golangci/golangci-lint/cmd/golangci-lint/main.go run
@echo "Running zerolog linter"
go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/agent
go run vendor/github.com/rs/zerolog/cmd/lint/lint.go github.com/woodpecker-ci/woodpecker/cmd/cli

1
go.mod

@@ -23,6 +23,7 @@ require (
github.com/go-sql-driver/mysql v1.6.0
github.com/gogits/go-gogs-client v0.0.0-20210131175652-1d7215cd8d85
github.com/golang-jwt/jwt/v4 v4.1.0
github.com/golangci/golangci-lint v1.42.1
github.com/google/go-github/v39 v39.2.0
github.com/gorilla/securecookie v1.1.1
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect

524
go.sum

File diff suppressed because it is too large


@@ -5,5 +5,6 @@
package tools
import (
_ "github.com/golangci/golangci-lint/cmd/golangci-lint"
_ "github.com/rs/zerolog/cmd/lint"
)
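
The hunk above starts at line 5, so the file's build constraint is not shown. A hedged sketch of the conventional shape of such a tools.go file (the exact header comment is an assumption, not the literal file contents):

```go
// +build tools

// Package tools records build-time tool dependencies (golangci-lint and the
// zerolog linter) so that `go mod tidy`/`go mod vendor` keep them available
// for the `go run vendor/...` invocations used in the Makefile.
package tools

import (
	_ "github.com/golangci/golangci-lint/cmd/golangci-lint"
	_ "github.com/rs/zerolog/cmd/lint"
)
```

Because the file carries a build tag, it never compiles into the server or agent binaries; it only pins the tool modules in go.mod.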

21
vendor/4d63.com/gochecknoglobals/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Leigh McCulloch
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,154 @@
package checknoglobals
import (
"flag"
"fmt"
"go/ast"
"go/token"
"strings"
"golang.org/x/tools/go/analysis"
)
// allowedExpression is a struct representing packages and methods that will
// be an allowed combination to use as a global variable, f.ex. Name `regexp`
// and SelName `MustCompile`.
type allowedExpression struct {
Name string
SelName string
}
const Doc = `check that no global variables exist
This analyzer checks for global variables and errors on any found.
A global variable is a variable declared in package scope and that can be read
and written to by any function within the package. Global variables can cause
side effects which are difficult to keep track of. Code in one function may
change the variable's state while another, unrelated chunk of code may be
affected by it.`
// Analyzer provides an Analyzer that checks that there are no global
// variables, except for errors and variables containing regular
// expressions.
func Analyzer() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "gochecknoglobals",
Doc: Doc,
Run: checkNoGlobals,
Flags: flags(),
RunDespiteErrors: true,
}
}
func flags() flag.FlagSet {
flags := flag.NewFlagSet("", flag.ExitOnError)
flags.Bool("t", false, "Include tests")
return *flags
}
func isAllowed(v ast.Node) bool {
switch i := v.(type) {
case *ast.Ident:
return i.Name == "_" || i.Name == "version" || looksLikeError(i)
case *ast.CallExpr:
if expr, ok := i.Fun.(*ast.SelectorExpr); ok {
return isAllowedSelectorExpression(expr)
}
case *ast.CompositeLit:
if expr, ok := i.Type.(*ast.SelectorExpr); ok {
return isAllowedSelectorExpression(expr)
}
}
return false
}
func isAllowedSelectorExpression(v *ast.SelectorExpr) bool {
x, ok := v.X.(*ast.Ident)
if !ok {
return false
}
allowList := []allowedExpression{
{Name: "regexp", SelName: "MustCompile"},
}
for _, i := range allowList {
if x.Name == i.Name && v.Sel.Name == i.SelName {
return true
}
}
return false
}
// looksLikeError returns true if the AST identifier starts
// with 'err' or 'Err', or false otherwise.
//
// TODO: https://github.com/leighmcculloch/gochecknoglobals/issues/5
func looksLikeError(i *ast.Ident) bool {
prefix := "err"
if i.IsExported() {
prefix = "Err"
}
return strings.HasPrefix(i.Name, prefix)
}
func checkNoGlobals(pass *analysis.Pass) (interface{}, error) {
includeTests := pass.Analyzer.Flags.Lookup("t").Value.(flag.Getter).Get().(bool)
for _, file := range pass.Files {
filename := pass.Fset.Position(file.Pos()).Filename
if !strings.HasSuffix(filename, ".go") {
continue
}
if !includeTests && strings.HasSuffix(filename, "_test.go") {
continue
}
for _, decl := range file.Decls {
genDecl, ok := decl.(*ast.GenDecl)
if !ok {
continue
}
if genDecl.Tok != token.VAR {
continue
}
for _, spec := range genDecl.Specs {
valueSpec := spec.(*ast.ValueSpec)
onlyAllowedValues := false
for _, vn := range valueSpec.Values {
if isAllowed(vn) {
onlyAllowedValues = true
continue
}
onlyAllowedValues = false
break
}
if onlyAllowedValues {
continue
}
for _, vn := range valueSpec.Names {
if isAllowed(vn) {
continue
}
message := fmt.Sprintf("%s is a global variable", vn.Name)
pass.Report(analysis.Diagnostic{
Pos: vn.Pos(),
Category: "global",
Message: message,
})
}
}
}
}
return nil, nil
}
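
To make the allow-list above concrete, a hypothetical file (not part of this commit) showing which package-level declarations gochecknoglobals accepts and which it reports:

```go
package example

import (
	"errors"
	"regexp"
)

// Declarations the analyzer allows:
var _ = 0                                  // blank identifier
var version = "1.0.0"                      // the literal name "version"
var ErrNotFound = errors.New("not found")  // names prefixed with Err/err
var semverRe = regexp.MustCompile(`^v\d+`) // value is regexp.MustCompile(...)

// Declaration it reports as "cache is a global variable":
var cache = map[string]string{}
```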

21
vendor/github.com/Antonboom/errname/LICENSE generated vendored Normal file

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Anton Telyshev
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -0,0 +1,134 @@
package analyzer
import (
"go/ast"
"go/token"
"strconv"
"strings"
"unicode"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
)
// New returns new errname analyzer.
func New() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "errname",
Doc: "Checks that sentinel errors are prefixed with the `Err` and error types are suffixed with the `Error`.",
Run: run,
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
}
type stringSet = map[string]struct{}
var (
imports = []ast.Node{(*ast.ImportSpec)(nil)}
types = []ast.Node{(*ast.TypeSpec)(nil)}
funcs = []ast.Node{(*ast.FuncDecl)(nil)}
)
func run(pass *analysis.Pass) (interface{}, error) {
insp := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
pkgAliases := map[string]string{}
insp.Preorder(imports, func(node ast.Node) {
i := node.(*ast.ImportSpec)
if n := i.Name; n != nil && i.Path != nil {
if path, err := strconv.Unquote(i.Path.Value); err == nil {
pkgAliases[n.Name] = getPkgFromPath(path)
}
}
})
allTypes := stringSet{}
typesSpecs := map[string]*ast.TypeSpec{}
insp.Preorder(types, func(node ast.Node) {
t := node.(*ast.TypeSpec)
allTypes[t.Name.Name] = struct{}{}
typesSpecs[t.Name.Name] = t
})
errorTypes := stringSet{}
insp.Preorder(funcs, func(node ast.Node) {
f := node.(*ast.FuncDecl)
t, ok := isMethodError(f)
if !ok {
return
}
errorTypes[t] = struct{}{}
tSpec, ok := typesSpecs[t]
if !ok {
panic("no specification for type " + t)
}
if _, ok := tSpec.Type.(*ast.ArrayType); ok {
if !isValidErrorArrayTypeName(t) {
reportAboutErrorType(pass, tSpec.Pos(), t, true)
}
} else if !isValidErrorTypeName(t) {
reportAboutErrorType(pass, tSpec.Pos(), t, false)
}
})
errorFuncs := stringSet{}
insp.Preorder(funcs, func(node ast.Node) {
f := node.(*ast.FuncDecl)
if isFuncReturningErr(f.Type, allTypes, errorTypes) {
errorFuncs[f.Name.Name] = struct{}{}
}
})
inspectPkgLevelVarsOnly := func(node ast.Node) bool {
switch v := node.(type) {
case *ast.FuncDecl:
return false
case *ast.ValueSpec:
if name, ok := isSentinelError(v, pkgAliases, allTypes, errorTypes, errorFuncs); ok && !isValidErrorVarName(name) {
reportAboutErrorVar(pass, v.Pos(), name)
}
}
return true
}
for _, f := range pass.Files {
ast.Inspect(f, inspectPkgLevelVarsOnly)
}
return nil, nil
}
func reportAboutErrorType(pass *analysis.Pass, typePos token.Pos, typeName string, isArrayType bool) {
var form string
if unicode.IsLower([]rune(typeName)[0]) {
form = "xxxError"
} else {
form = "XxxError"
}
if isArrayType {
form += "s"
}
pass.Reportf(typePos, "the type name `%s` should conform to the `%s` format", typeName, form)
}
func reportAboutErrorVar(pass *analysis.Pass, pos token.Pos, varName string) {
var form string
if unicode.IsLower([]rune(varName)[0]) {
form = "errXxx"
} else {
form = "ErrXxx"
}
pass.Reportf(pos, "the variable name `%s` should conform to the `%s` format", varName, form)
}
func getPkgFromPath(p string) string {
idx := strings.LastIndex(p, "/")
if idx == -1 {
return p
}
return p[idx+1:]
}
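
A hypothetical file (not part of this commit) illustrating the names this analyzer accepts and the diagnostics it produces:

```go
package example

import "errors"

// Accepted: the error type is suffixed with "Error" and the sentinel
// variable is prefixed with "Err".
type ValidationError struct{ msg string }

func (e *ValidationError) Error() string { return e.msg }

var ErrTimeout = errors.New("timeout")

// Reported: "the type name `InvalidInput` should conform to the `XxxError` format".
type InvalidInput struct{ msg string }

func (e *InvalidInput) Error() string { return e.msg }

// Reported: "the variable name `TimeoutErr` should conform to the `ErrXxx` format".
var TimeoutErr = errors.New("timeout")
```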


@@ -0,0 +1,237 @@
package analyzer
import (
"go/ast"
"go/token"
"strings"
"unicode"
)
func isMethodError(f *ast.FuncDecl) (typeName string, ok bool) {
if f.Recv == nil {
return "", false
}
if f.Name.Name != "Error" {
return "", false
}
if f.Type == nil || f.Type.Results == nil || len(f.Type.Results.List) != 1 {
return "", false
}
returnType, ok := f.Type.Results.List[0].Type.(*ast.Ident)
if !ok {
return "", false
}
var receiverType string
switch rt := f.Recv.List[0].Type.(type) {
case *ast.Ident:
receiverType = rt.Name
case *ast.StarExpr:
if i, ok := rt.X.(*ast.Ident); ok {
receiverType = i.Name
}
}
return receiverType, returnType.Name == "string"
}
func isValidErrorTypeName(s string) bool {
if isInitialism(s) {
return true
}
words := split(s)
wordsCnt := wordsCount(words)
if wordsCnt["error"] != 1 {
return false
}
return words[len(words)-1] == "error"
}
func isValidErrorArrayTypeName(s string) bool {
if isInitialism(s) {
return true
}
words := split(s)
wordsCnt := wordsCount(words)
if wordsCnt["errors"] != 1 {
return false
}
return words[len(words)-1] == "errors"
}
func isFuncReturningErr(fType *ast.FuncType, allTypes, errorTypes stringSet) bool {
if fType == nil || fType.Results == nil || len(fType.Results.List) != 1 {
return false
}
var returnTypeName string
switch rt := fType.Results.List[0].Type.(type) {
case *ast.Ident:
returnTypeName = rt.Name
case *ast.StarExpr:
if i, ok := rt.X.(*ast.Ident); ok {
returnTypeName = i.Name
}
}
return isErrorType(returnTypeName, allTypes, errorTypes)
}
func isErrorType(tName string, allTypes, errorTypes stringSet) bool {
_, isUserType := allTypes[tName]
_, isErrType := errorTypes[tName]
return isErrType || (tName == "error" && !isUserType)
}
var knownErrConstructors = stringSet{
"fmt.Errorf": {},
"errors.Errorf": {},
"errors.New": {},
"errors.Newf": {},
"errors.NewWithDepth": {},
"errors.NewWithDepthf": {},
"errors.NewAssertionErrorWithWrappedErrf": {},
}
func isSentinelError( //nolint:gocognit
v *ast.ValueSpec,
pkgAliases map[string]string,
allTypes, errorTypes, errorFuncs stringSet,
) (varName string, ok bool) {
if len(v.Names) != 1 {
return "", false
}
varName = v.Names[0].Name
switch vv := v.Type.(type) {
// var ErrEndOfFile error
// var ErrEndOfFile SomeErrType
case *ast.Ident:
if isErrorType(vv.Name, allTypes, errorTypes) {
return varName, true
}
// var ErrEndOfFile *SomeErrType
case *ast.StarExpr:
if i, ok := vv.X.(*ast.Ident); ok && isErrorType(i.Name, allTypes, errorTypes) {
return varName, true
}
}
if len(v.Values) != 1 {
return "", false
}
switch vv := v.Values[0].(type) {
case *ast.CallExpr:
switch fun := vv.Fun.(type) {
// var ErrEndOfFile = errors.New("end of file")
case *ast.SelectorExpr:
pkg, ok := fun.X.(*ast.Ident)
if !ok {
return "", false
}
pkgFun := fun.Sel
pkgName := pkg.Name
if a, ok := pkgAliases[pkgName]; ok {
pkgName = a
}
_, ok = knownErrConstructors[pkgName+"."+pkgFun.Name]
return varName, ok
// var ErrEndOfFile = newErrEndOfFile()
// var ErrEndOfFile = new(EndOfFileError)
// const ErrEndOfFile = constError("end of file")
case *ast.Ident:
if isErrorType(fun.Name, allTypes, errorTypes) {
return varName, true
}
if _, ok := errorFuncs[fun.Name]; ok {
return varName, true
}
if fun.Name == "new" && len(vv.Args) == 1 {
if i, ok := vv.Args[0].(*ast.Ident); ok {
return varName, isErrorType(i.Name, allTypes, errorTypes)
}
}
// var ErrEndOfFile = func() error { ... }
case *ast.FuncLit:
return varName, isFuncReturningErr(fun.Type, allTypes, errorTypes)
}
// var ErrEndOfFile = &EndOfFileError{}
case *ast.UnaryExpr:
if vv.Op == token.AND { // &
if lit, ok := vv.X.(*ast.CompositeLit); ok {
if i, ok := lit.Type.(*ast.Ident); ok {
return varName, isErrorType(i.Name, allTypes, errorTypes)
}
}
}
// var ErrEndOfFile = EndOfFileError{}
case *ast.CompositeLit:
if i, ok := vv.Type.(*ast.Ident); ok {
return varName, isErrorType(i.Name, allTypes, errorTypes)
}
}
return "", false
}
func isValidErrorVarName(s string) bool {
if isInitialism(s) {
return true
}
words := split(s)
wordsCnt := wordsCount(words)
if wordsCnt["err"] != 1 {
return false
}
return words[0] == "err"
}
func isInitialism(s string) bool {
return strings.ToLower(s) == s || strings.ToUpper(s) == s
}
func split(s string) []string {
var words []string
ss := []rune(s)
var b strings.Builder
b.WriteRune(ss[0])
for _, r := range ss[1:] {
if unicode.IsUpper(r) {
words = append(words, strings.ToLower(b.String()))
b.Reset()
}
b.WriteRune(r)
}
words = append(words, strings.ToLower(b.String()))
return words
}
func wordsCount(w []string) map[string]int {
result := make(map[string]int, len(w))
for _, ww := range w {
result[ww]++
}
return result
}
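
The comments inside isSentinelError list the declaration forms it recognizes; a hypothetical illustration (not part of this commit) of declarations that are all treated as sentinel errors and therefore must follow the ErrXxx/errXxx naming:

```go
package example

import (
	"errors"
	"fmt"
)

type EndOfFileError struct{}

func (*EndOfFileError) Error() string { return "end of file" }

var (
	ErrClosed  = errors.New("closed")  // known constructor errors.New
	ErrTooBig  = fmt.Errorf("too big") // known constructor fmt.Errorf
	ErrEOF     = &EndOfFileError{}     // address of an error-typed composite literal
	ErrPartial EndOfFileError          // variable with an explicit error type
)
```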

2
vendor/github.com/BurntSushi/toml/.gitignore generated vendored Normal file

@@ -0,0 +1,2 @@
toml.test
/toml-test

1
vendor/github.com/BurntSushi/toml/COMPATIBLE generated vendored Normal file

@@ -0,0 +1 @@
Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0).

21
vendor/github.com/BurntSushi/toml/COPYING generated vendored Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2013 TOML authors
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

220
vendor/github.com/BurntSushi/toml/README.md generated vendored Normal file

@@ -0,0 +1,220 @@
## TOML parser and encoder for Go with reflection
TOML stands for Tom's Obvious, Minimal Language. This Go package provides a
reflection interface similar to Go's standard library `json` and `xml`
packages. This package also supports the `encoding.TextUnmarshaler` and
`encoding.TextMarshaler` interfaces so that you can define custom data
representations. (There is an example of this below.)
Compatible with TOML version [v1.0.0](https://toml.io/en/v1.0.0).
Documentation: https://godocs.io/github.com/BurntSushi/toml
See the [releases page](https://github.com/BurntSushi/toml/releases) for a
changelog; this information is also in the git tag annotations (e.g. `git show
v0.4.0`).
This library requires Go 1.13 or newer; install it with:
$ go get github.com/BurntSushi/toml
It also comes with a TOML validator CLI tool:
$ go get github.com/BurntSushi/toml/cmd/tomlv
$ tomlv some-toml-file.toml
### Testing
This package passes all tests in
[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder
and the encoder.
### Examples
This package works similarly to how the Go standard library handles XML and
JSON. Namely, data is loaded into Go values via reflection.
For the simplest example, consider some TOML file as just a list of keys
and values:
```toml
Age = 25
Cats = [ "Cauchy", "Plato" ]
Pi = 3.14
Perfection = [ 6, 28, 496, 8128 ]
DOB = 1987-07-05T05:45:00Z
```
Which could be defined in Go as:
```go
type Config struct {
Age int
Cats []string
Pi float64
Perfection []int
DOB time.Time // requires `import time`
}
```
And then decoded with:
```go
var conf Config
if _, err := toml.Decode(tomlData, &conf); err != nil {
// handle error
}
```
You can also use struct tags if your struct field name doesn't map to a TOML
key value directly:
```toml
some_key_NAME = "wat"
```
```go
type TOML struct {
ObscureKey string `toml:"some_key_NAME"`
}
```
Beware that, like most other decoders, **only exported fields** are
considered when encoding and decoding; private fields are silently ignored.
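For example (an illustrative snippet, not from the vendored README):
```go
type settings struct {
	Host string // exported: decoded from the TOML key "Host" (or "host")
	port int    // unexported: silently ignored by the decoder
}
```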
### Using the `encoding.TextUnmarshaler` interface
Here's an example that automatically parses duration strings into
`time.Duration` values:
```toml
[[song]]
name = "Thunder Road"
duration = "4m49s"
[[song]]
name = "Stairway to Heaven"
duration = "8m03s"
```
Which can be decoded with:
```go
type song struct {
Name string
Duration duration
}
type songs struct {
Song []song
}
var favorites songs
if _, err := toml.Decode(blob, &favorites); err != nil {
log.Fatal(err)
}
for _, s := range favorites.Song {
fmt.Printf("%s (%s)\n", s.Name, s.Duration)
}
```
And you'll also need a `duration` type that satisfies the
`encoding.TextUnmarshaler` interface:
```go
type duration struct {
time.Duration
}
func (d *duration) UnmarshalText(text []byte) error {
var err error
d.Duration, err = time.ParseDuration(string(text))
return err
}
```
To target TOML specifically you can implement the `UnmarshalTOML` interface in
a similar way.
### More complex usage
Here's an example of how to load the example from the official spec page:
```toml
# This is a TOML document. Boom.
title = "TOML Example"
[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?
[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true
[servers]
# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"
[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"
[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
# Line breaks are OK when inside arrays
hosts = [
"alpha",
"omega"
]
```
And the corresponding Go types are:
```go
type tomlConfig struct {
Title string
Owner ownerInfo
DB database `toml:"database"`
Servers map[string]server
Clients clients
}
type ownerInfo struct {
Name string
Org string `toml:"organization"`
Bio string
DOB time.Time
}
type database struct {
Server string
Ports []int
ConnMax int `toml:"connection_max"`
Enabled bool
}
type server struct {
IP string
DC string
}
type clients struct {
Data [][]interface{}
Hosts []string
}
```
Note that a case insensitive match will be tried if an exact match can't be
found.
A working example of the above can be found in `_examples/example.{go,toml}`.

511
vendor/github.com/BurntSushi/toml/decode.go generated vendored Normal file

@@ -0,0 +1,511 @@
package toml
import (
"encoding"
"fmt"
"io"
"io/ioutil"
"math"
"os"
"reflect"
"strings"
"time"
)
// Unmarshaler is the interface implemented by objects that can unmarshal a
// TOML description of themselves.
type Unmarshaler interface {
UnmarshalTOML(interface{}) error
}
// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`.
func Unmarshal(p []byte, v interface{}) error {
_, err := Decode(string(p), v)
return err
}
// Primitive is a TOML value that hasn't been decoded into a Go value.
//
// This type can be used for any value, which will cause decoding to be delayed.
// You can use the PrimitiveDecode() function to "manually" decode these values.
//
// NOTE: The underlying representation of a `Primitive` value is subject to
// change. Do not rely on it.
//
// NOTE: Primitive values are still parsed, so using them will only avoid the
// overhead of reflection. They can be useful when you don't know the exact type
// of TOML data until runtime.
type Primitive struct {
undecoded interface{}
context Key
}
// PrimitiveDecode is just like the other `Decode*` functions, except it
// decodes a TOML value that has already been parsed. Valid primitive values
// can *only* be obtained from values filled by the decoder functions,
// including this method. (i.e., `v` may contain more `Primitive`
// values.)
//
// Meta data for primitive values is included in the meta data returned by
// the `Decode*` functions with one exception: keys returned by the Undecoded
// method will only reflect keys that were decoded. Namely, any keys hidden
// behind a Primitive will be considered undecoded. Executing this method will
// update the undecoded keys in the meta data. (See the example.)
func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error {
md.context = primValue.context
defer func() { md.context = nil }()
return md.unify(primValue.undecoded, rvalue(v))
}
// Decoder decodes TOML data.
//
// TOML tables correspond to Go structs or maps (dealer's choice; they can be
// used interchangeably).
//
// TOML table arrays correspond to either a slice of structs or a slice of maps.
//
// TOML datetimes correspond to Go time.Time values. Local datetimes are parsed
// in the local timezone.
//
// All other TOML types (float, string, int, bool and array) correspond to the
// obvious Go types.
//
// An exception to the above rules is if a type implements the TextUnmarshaler
// interface, in which case any primitive TOML value (floats, strings, integers,
// booleans, datetimes) will be converted to a []byte and given to the value's
// UnmarshalText method. See the Unmarshaler example for a demonstration with
// time duration strings.
//
// Key mapping
//
// TOML keys can map to either keys in a Go map or field names in a Go struct.
// The special `toml` struct tag can be used to map TOML keys to struct fields
// that don't match the key name exactly (see the example). A case insensitive
// match to struct names will be tried if an exact match can't be found.
//
// The mapping between TOML values and Go values is loose. That is, there may
// exist TOML values that cannot be placed into your representation, and there
// may be parts of your representation that do not correspond to TOML values.
// This loose mapping can be made stricter by using the IsDefined and/or
// Undecoded methods on the MetaData returned.
//
// This decoder does not handle cyclic types. Decode will not terminate if a
// cyclic type is passed.
type Decoder struct {
r io.Reader
}
// NewDecoder creates a new Decoder.
func NewDecoder(r io.Reader) *Decoder {
return &Decoder{r: r}
}
// Decode TOML data in to the pointer `v`.
func (dec *Decoder) Decode(v interface{}) (MetaData, error) {
rv := reflect.ValueOf(v)
if rv.Kind() != reflect.Ptr {
return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v))
}
if rv.IsNil() {
return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v))
}
// TODO: have the parser read from io.Reader? Or at the very least, make
// it read from []byte rather than string
data, err := ioutil.ReadAll(dec.r)
if err != nil {
return MetaData{}, err
}
p, err := parse(string(data))
if err != nil {
return MetaData{}, err
}
md := MetaData{
p.mapping, p.types, p.ordered,
make(map[string]bool, len(p.ordered)), nil,
}
return md, md.unify(p.mapping, indirect(rv))
}
// Decode the TOML data in to the pointer v.
//
// See the documentation on Decoder for a description of the decoding process.
func Decode(data string, v interface{}) (MetaData, error) {
return NewDecoder(strings.NewReader(data)).Decode(v)
}
// DecodeFile is just like Decode, except it will automatically read the
// contents of the file at path and decode it for you.
func DecodeFile(path string, v interface{}) (MetaData, error) {
fp, err := os.Open(path)
if err != nil {
return MetaData{}, err
}
defer fp.Close()
return NewDecoder(fp).Decode(v)
}
// unify performs a sort of type unification based on the structure of `rv`,
// which is the client representation.
//
// Any type mismatch produces an error. Finding a type that we don't know
// how to handle produces an unsupported type error.
func (md *MetaData) unify(data interface{}, rv reflect.Value) error {
// Special case. Look for a `Primitive` value.
// TODO: #76 would make this superfluous after implemented.
if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() {
// Save the undecoded data and the key context into the primitive
// value.
context := make(Key, len(md.context))
copy(context, md.context)
rv.Set(reflect.ValueOf(Primitive{
undecoded: data,
context: context,
}))
return nil
}
// Special case. Unmarshaler Interface support.
if rv.CanAddr() {
if v, ok := rv.Addr().Interface().(Unmarshaler); ok {
return v.UnmarshalTOML(data)
}
}
// Special case. Look for a value satisfying the TextUnmarshaler interface.
if v, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
return md.unifyText(data, v)
}
// TODO:
// The behavior here is incorrect whenever a Go type satisfies the
// encoding.TextUnmarshaler interface but also corresponds to a TOML hash or
// array. In particular, the unmarshaler should only be applied to primitive
// TOML values. But at this point, it will be applied to all kinds of values
// and produce an incorrect error whenever those values are hashes or arrays
// (including arrays of tables).
k := rv.Kind()
// laziness
if k >= reflect.Int && k <= reflect.Uint64 {
return md.unifyInt(data, rv)
}
switch k {
case reflect.Ptr:
elem := reflect.New(rv.Type().Elem())
err := md.unify(data, reflect.Indirect(elem))
if err != nil {
return err
}
rv.Set(elem)
return nil
case reflect.Struct:
return md.unifyStruct(data, rv)
case reflect.Map:
return md.unifyMap(data, rv)
case reflect.Array:
return md.unifyArray(data, rv)
case reflect.Slice:
return md.unifySlice(data, rv)
case reflect.String:
return md.unifyString(data, rv)
case reflect.Bool:
return md.unifyBool(data, rv)
case reflect.Interface:
// we only support empty interfaces.
if rv.NumMethod() > 0 {
return e("unsupported type %s", rv.Type())
}
return md.unifyAnything(data, rv)
case reflect.Float32:
fallthrough
case reflect.Float64:
return md.unifyFloat64(data, rv)
}
return e("unsupported type %s", rv.Kind())
}
func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error {
tmap, ok := mapping.(map[string]interface{})
if !ok {
if mapping == nil {
return nil
}
return e("type mismatch for %s: expected table but found %T",
rv.Type().String(), mapping)
}
for key, datum := range tmap {
var f *field
fields := cachedTypeFields(rv.Type())
for i := range fields {
ff := &fields[i]
if ff.name == key {
f = ff
break
}
if f == nil && strings.EqualFold(ff.name, key) {
f = ff
}
}
if f != nil {
subv := rv
for _, i := range f.index {
subv = indirect(subv.Field(i))
}
if isUnifiable(subv) {
md.decoded[md.context.add(key).String()] = true
md.context = append(md.context, key)
if err := md.unify(datum, subv); err != nil {
return err
}
md.context = md.context[0 : len(md.context)-1]
} else if f.name != "" {
// Bad user! No soup for you!
return e("cannot write unexported field %s.%s",
rv.Type().String(), f.name)
}
}
}
return nil
}
func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error {
if k := rv.Type().Key().Kind(); k != reflect.String {
return fmt.Errorf(
"toml: cannot decode to a map with non-string key type (%s in %q)",
k, rv.Type())
}
tmap, ok := mapping.(map[string]interface{})
if !ok {
if tmap == nil {
return nil
}
return badtype("map", mapping)
}
if rv.IsNil() {
rv.Set(reflect.MakeMap(rv.Type()))
}
for k, v := range tmap {
md.decoded[md.context.add(k).String()] = true
md.context = append(md.context, k)
rvkey := indirect(reflect.New(rv.Type().Key()))
rvval := reflect.Indirect(reflect.New(rv.Type().Elem()))
if err := md.unify(v, rvval); err != nil {
return err
}
md.context = md.context[0 : len(md.context)-1]
rvkey.SetString(k)
rv.SetMapIndex(rvkey, rvval)
}
return nil
}
func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error {
datav := reflect.ValueOf(data)
if datav.Kind() != reflect.Slice {
if !datav.IsValid() {
return nil
}
return badtype("slice", data)
}
if l := datav.Len(); l != rv.Len() {
return e("expected array length %d; got TOML array of length %d", rv.Len(), l)
}
return md.unifySliceArray(datav, rv)
}
func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error {
datav := reflect.ValueOf(data)
if datav.Kind() != reflect.Slice {
if !datav.IsValid() {
return nil
}
return badtype("slice", data)
}
n := datav.Len()
if rv.IsNil() || rv.Cap() < n {
rv.Set(reflect.MakeSlice(rv.Type(), n, n))
}
rv.SetLen(n)
return md.unifySliceArray(datav, rv)
}
func (md *MetaData) unifySliceArray(data, rv reflect.Value) error {
l := data.Len()
for i := 0; i < l; i++ {
err := md.unify(data.Index(i).Interface(), indirect(rv.Index(i)))
if err != nil {
return err
}
}
return nil
}
func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error {
if _, ok := data.(time.Time); ok {
rv.Set(reflect.ValueOf(data))
return nil
}
return badtype("time.Time", data)
}
func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error {
if s, ok := data.(string); ok {
rv.SetString(s)
return nil
}
return badtype("string", data)
}
func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error {
if num, ok := data.(float64); ok {
switch rv.Kind() {
case reflect.Float32:
fallthrough
case reflect.Float64:
rv.SetFloat(num)
default:
panic("bug")
}
return nil
}
return badtype("float", data)
}
func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error {
if num, ok := data.(int64); ok {
if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 {
switch rv.Kind() {
case reflect.Int, reflect.Int64:
// No bounds checking necessary.
case reflect.Int8:
if num < math.MinInt8 || num > math.MaxInt8 {
return e("value %d is out of range for int8", num)
}
case reflect.Int16:
if num < math.MinInt16 || num > math.MaxInt16 {
return e("value %d is out of range for int16", num)
}
case reflect.Int32:
if num < math.MinInt32 || num > math.MaxInt32 {
return e("value %d is out of range for int32", num)
}
}
rv.SetInt(num)
} else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 {
unum := uint64(num)
switch rv.Kind() {
case reflect.Uint, reflect.Uint64:
// No bounds checking necessary.
case reflect.Uint8:
if num < 0 || unum > math.MaxUint8 {
return e("value %d is out of range for uint8", num)
}
case reflect.Uint16:
if num < 0 || unum > math.MaxUint16 {
return e("value %d is out of range for uint16", num)
}
case reflect.Uint32:
if num < 0 || unum > math.MaxUint32 {
return e("value %d is out of range for uint32", num)
}
}
rv.SetUint(unum)
} else {
panic("unreachable")
}
return nil
}
return badtype("integer", data)
}
func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error {
if b, ok := data.(bool); ok {
rv.SetBool(b)
return nil
}
return badtype("boolean", data)
}
func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error {
rv.Set(reflect.ValueOf(data))
return nil
}
func (md *MetaData) unifyText(data interface{}, v encoding.TextUnmarshaler) error {
var s string
switch sdata := data.(type) {
case TextMarshaler:
text, err := sdata.MarshalText()
if err != nil {
return err
}
s = string(text)
case fmt.Stringer:
s = sdata.String()
case string:
s = sdata
case bool:
s = fmt.Sprintf("%v", sdata)
case int64:
s = fmt.Sprintf("%d", sdata)
case float64:
s = fmt.Sprintf("%f", sdata)
default:
return badtype("primitive (string-like)", data)
}
if err := v.UnmarshalText([]byte(s)); err != nil {
return err
}
return nil
}
// rvalue returns a reflect.Value of `v`. All pointers are resolved.
func rvalue(v interface{}) reflect.Value {
return indirect(reflect.ValueOf(v))
}
// indirect returns the value pointed to by a pointer.
// Pointers are followed until the value is not a pointer.
// New values are allocated for each nil pointer.
//
// An exception to this rule is if the value satisfies an interface of
// interest to us (like encoding.TextUnmarshaler).
func indirect(v reflect.Value) reflect.Value {
if v.Kind() != reflect.Ptr {
if v.CanSet() {
pv := v.Addr()
if _, ok := pv.Interface().(encoding.TextUnmarshaler); ok {
return pv
}
}
return v
}
if v.IsNil() {
v.Set(reflect.New(v.Type().Elem()))
}
return indirect(reflect.Indirect(v))
}
func isUnifiable(rv reflect.Value) bool {
if rv.CanSet() {
return true
}
if _, ok := rv.Interface().(encoding.TextUnmarshaler); ok {
return true
}
return false
}
func e(format string, args ...interface{}) error {
return fmt.Errorf("toml: "+format, args...)
}
func badtype(expected string, data interface{}) error {
return e("cannot load TOML value of type %T into a Go %s", data, expected)
}

18
vendor/github.com/BurntSushi/toml/decode_go116.go generated vendored Normal file

@@ -0,0 +1,18 @@
// +build go1.16
package toml
import (
"io/fs"
)
// DecodeFS is just like Decode, except it will automatically read the contents
// of the file at `path` from a fs.FS instance.
func DecodeFS(fsys fs.FS, path string, v interface{}) (MetaData, error) {
fp, err := fsys.Open(path)
if err != nil {
return MetaData{}, err
}
defer fp.Close()
return NewDecoder(fp).Decode(v)
}
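
A minimal usage sketch for DecodeFS (hypothetical file and module layout; embed.FS satisfies fs.FS on Go 1.16+), not part of the vendored package:

```go
package main

import (
	"embed"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

//go:embed config.toml
var configFS embed.FS

type config struct {
	Age  int
	Cats []string
}

func main() {
	var cfg config
	if _, err := toml.DecodeFS(configFS, "config.toml", &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.Age, cfg.Cats)
}
```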

123
vendor/github.com/BurntSushi/toml/decode_meta.go generated vendored Normal file

@@ -0,0 +1,123 @@
package toml
import "strings"
// MetaData allows access to meta information about TOML data that may not be
// inferable via reflection. In particular, whether a key has been defined and
// the TOML type of a key.
type MetaData struct {
mapping map[string]interface{}
types map[string]tomlType
keys []Key
decoded map[string]bool
context Key // Used only during decoding.
}
// IsDefined reports if the key exists in the TOML data.
//
// The key should be specified hierarchically, for example to access the TOML
// key "a.b.c" you would use:
//
// IsDefined("a", "b", "c")
//
// IsDefined will return false if an empty key given. Keys are case sensitive.
func (md *MetaData) IsDefined(key ...string) bool {
if len(key) == 0 {
return false
}
var hash map[string]interface{}
var ok bool
var hashOrVal interface{} = md.mapping
for _, k := range key {
if hash, ok = hashOrVal.(map[string]interface{}); !ok {
return false
}
if hashOrVal, ok = hash[k]; !ok {
return false
}
}
return true
}
// Type returns a string representation of the type of the key specified.
//
// Type will return the empty string if given an empty key or a key that does
// not exist. Keys are case sensitive.
func (md *MetaData) Type(key ...string) string {
fullkey := strings.Join(key, ".")
if typ, ok := md.types[fullkey]; ok {
return typ.typeString()
}
return ""
}
// Key represents any TOML key, including key groups. Use (MetaData).Keys to get
// values of this type.
type Key []string
func (k Key) String() string { return strings.Join(k, ".") }
func (k Key) maybeQuotedAll() string {
var ss []string
for i := range k {
ss = append(ss, k.maybeQuoted(i))
}
return strings.Join(ss, ".")
}
func (k Key) maybeQuoted(i int) string {
if k[i] == "" {
return `""`
}
quote := false
for _, c := range k[i] {
if !isBareKeyChar(c) {
quote = true
break
}
}
if quote {
return `"` + quotedReplacer.Replace(k[i]) + `"`
}
return k[i]
}
func (k Key) add(piece string) Key {
newKey := make(Key, len(k)+1)
copy(newKey, k)
newKey[len(k)] = piece
return newKey
}
// Keys returns a slice of every key in the TOML data, including key groups.
//
// Each key is itself a slice, where the first element is the top of the
// hierarchy and the last is the most specific. The list will have the same
// order as the keys appeared in the TOML data.
//
// All keys returned are non-empty.
func (md *MetaData) Keys() []Key {
return md.keys
}
// Undecoded returns all keys that have not been decoded in the order in which
// they appear in the original TOML document.
//
// This includes keys that haven't been decoded because of a Primitive value.
// Once the Primitive value is decoded, the keys will be considered decoded.
//
// Also note that decoding into an empty interface will result in no decoding,
// and so no keys will be considered decoded.
//
// In this sense, the Undecoded keys correspond to keys in the TOML document
// that do not have a concrete type in your representation.
func (md *MetaData) Undecoded() []Key {
undecoded := make([]Key, 0, len(md.keys))
for _, key := range md.keys {
if !md.decoded[key.String()] {
undecoded = append(undecoded, key)
}
}
return undecoded
}
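
A short sketch (hypothetical keys, not part of the vendored package) of how the MetaData methods above are typically used together with Decode:

```go
package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	blob := `
name = "woodpecker"

[server]
addr = ":8000"
`
	var cfg struct{ Name string }

	md, err := toml.Decode(blob, &cfg)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(md.IsDefined("server", "addr")) // true
	// Keys present in the TOML but not mapped by cfg, e.g. server.addr:
	fmt.Println(md.Undecoded())
}
```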

33
vendor/github.com/BurntSushi/toml/deprecated.go generated vendored Normal file

@@ -0,0 +1,33 @@
package toml
import (
"encoding"
"io"
)
// DEPRECATED!
//
// Use the identical encoding.TextMarshaler instead. It is defined here to
// support Go 1.1 and older.
type TextMarshaler encoding.TextMarshaler
// DEPRECATED!
//
// Use the identical encoding.TextUnmarshaler instead. It is defined here to
// support Go 1.1 and older.
type TextUnmarshaler encoding.TextUnmarshaler
// DEPRECATED!
//
// Use MetaData.PrimitiveDecode instead.
func PrimitiveDecode(primValue Primitive, v interface{}) error {
md := MetaData{decoded: make(map[string]bool)}
return md.unify(primValue.undecoded, rvalue(v))
}
// DEPRECATED!
//
// Use NewDecoder(reader).Decode(&v) instead.
func DecodeReader(r io.Reader, v interface{}) (MetaData, error) {
return NewDecoder(r).Decode(v)
}

13
vendor/github.com/BurntSushi/toml/doc.go generated vendored Normal file

@@ -0,0 +1,13 @@
/*
Package toml implements decoding and encoding of TOML files.
This package supports TOML v1.0.0, as listed on https://toml.io
There is also support for delaying decoding with the Primitive type, and
querying the set of keys in a TOML document with the MetaData type.
The github.com/BurntSushi/toml/cmd/tomlv package implements a TOML validator,
and can be used to verify whether a TOML document is valid. It can also be used to
print the type of each key.
*/
package toml

650
vendor/github.com/BurntSushi/toml/encode.go generated vendored Normal file

@@ -0,0 +1,650 @@
package toml
import (
"bufio"
"encoding"
"errors"
"fmt"
"io"
"math"
"reflect"
"sort"
"strconv"
"strings"
"time"
"github.com/BurntSushi/toml/internal"
)
type tomlEncodeError struct{ error }
var (
errArrayNilElement = errors.New("toml: cannot encode array with nil element")
errNonString = errors.New("toml: cannot encode a map with non-string key type")
errAnonNonStruct = errors.New("toml: cannot encode an anonymous field that is not a struct")
errNoKey = errors.New("toml: top-level values must be Go maps or structs")
errAnything = errors.New("") // used in testing
)
var quotedReplacer = strings.NewReplacer(
"\"", "\\\"",
"\\", "\\\\",
"\x00", `\u0000`,
"\x01", `\u0001`,
"\x02", `\u0002`,
"\x03", `\u0003`,
"\x04", `\u0004`,
"\x05", `\u0005`,
"\x06", `\u0006`,
"\x07", `\u0007`,
"\b", `\b`,
"\t", `\t`,
"\n", `\n`,
"\x0b", `\u000b`,
"\f", `\f`,
"\r", `\r`,
"\x0e", `\u000e`,
"\x0f", `\u000f`,
"\x10", `\u0010`,
"\x11", `\u0011`,
"\x12", `\u0012`,
"\x13", `\u0013`,
"\x14", `\u0014`,
"\x15", `\u0015`,
"\x16", `\u0016`,
"\x17", `\u0017`,
"\x18", `\u0018`,
"\x19", `\u0019`,
"\x1a", `\u001a`,
"\x1b", `\u001b`,
"\x1c", `\u001c`,
"\x1d", `\u001d`,
"\x1e", `\u001e`,
"\x1f", `\u001f`,
"\x7f", `\u007f`,
)
// Encoder encodes a Go to a TOML document.
//
// The mapping between Go values and TOML values should be precisely the same as
// for the Decode* functions. Similarly, the TextMarshaler interface is
// supported by encoding the resulting bytes as strings. If you want to write
// arbitrary binary data then you will need to use something like base64 since
// TOML does not have any binary types.
//
// When encoding TOML hashes (Go maps or structs), keys without any sub-hashes
// are encoded first.
//
// Go maps will be sorted alphabetically by key for deterministic output.
//
// Encoding Go values without a corresponding TOML representation will return an
// error. Examples of this includes maps with non-string keys, slices with nil
// elements, embedded non-struct types, and nested slices containing maps or
// structs. (e.g. [][]map[string]string is not allowed but []map[string]string
// is okay, as is []map[string][]string).
//
// NOTE: Only exported keys are encoded due to the use of reflection. Unexported
// keys are silently discarded.
type Encoder struct {
// The string to use for a single indentation level. The default is two
// spaces.
Indent string
// hasWritten is whether we have written any output to w yet.
hasWritten bool
w *bufio.Writer
}
// NewEncoder create a new Encoder.
func NewEncoder(w io.Writer) *Encoder {
return &Encoder{
w: bufio.NewWriter(w),
Indent: " ",
}
}
// Encode writes a TOML representation of the Go value to the Encoder's writer.
//
// An error is returned if the value given cannot be encoded to a valid TOML
// document.
func (enc *Encoder) Encode(v interface{}) error {
rv := eindirect(reflect.ValueOf(v))
if err := enc.safeEncode(Key([]string{}), rv); err != nil {
return err
}
return enc.w.Flush()
}
func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) {
defer func() {
if r := recover(); r != nil {
if terr, ok := r.(tomlEncodeError); ok {
err = terr.error
return
}
panic(r)
}
}()
enc.encode(key, rv)
return nil
}
func (enc *Encoder) encode(key Key, rv reflect.Value) {
// Special case. Time needs to be in ISO8601 format.
// Special case. If we can marshal the type to text, then we use that.
// Basically, this prevents the encoder for handling these types as
// generic structs (or whatever the underlying type of a TextMarshaler is).
switch t := rv.Interface().(type) {
case time.Time, encoding.TextMarshaler:
enc.writeKeyValue(key, rv, false)
return
// TODO: #76 would make this superfluous after implemented.
case Primitive:
enc.encode(key, reflect.ValueOf(t.undecoded))
return
}
k := rv.Kind()
switch k {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
reflect.Uint64,
reflect.Float32, reflect.Float64, reflect.String, reflect.Bool:
enc.writeKeyValue(key, rv, false)
case reflect.Array, reflect.Slice:
if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) {
enc.eArrayOfTables(key, rv)
} else {
enc.writeKeyValue(key, rv, false)
}
case reflect.Interface:
if rv.IsNil() {
return
}
enc.encode(key, rv.Elem())
case reflect.Map:
if rv.IsNil() {
return
}
enc.eTable(key, rv)
case reflect.Ptr:
if rv.IsNil() {
return
}
enc.encode(key, rv.Elem())
case reflect.Struct:
enc.eTable(key, rv)
default:
encPanic(fmt.Errorf("unsupported type for key '%s': %s", key, k))
}
}
// eElement encodes any value that can be an array element.
func (enc *Encoder) eElement(rv reflect.Value) {
switch v := rv.Interface().(type) {
case time.Time: // Using TextMarshaler adds extra quotes, which we don't want.
format := time.RFC3339Nano
switch v.Location() {
case internal.LocalDatetime:
format = "2006-01-02T15:04:05.999999999"
case internal.LocalDate:
format = "2006-01-02"
case internal.LocalTime:
format = "15:04:05.999999999"
}
switch v.Location() {
default:
enc.wf(v.Format(format))
case internal.LocalDatetime, internal.LocalDate, internal.LocalTime:
enc.wf(v.In(time.UTC).Format(format))
}
return
case encoding.TextMarshaler:
// Use text marshaler if it's available for this value.
if s, err := v.MarshalText(); err != nil {
encPanic(err)
} else {
enc.writeQuoted(string(s))
}
return
}
switch rv.Kind() {
case reflect.String:
enc.writeQuoted(rv.String())
case reflect.Bool:
enc.wf(strconv.FormatBool(rv.Bool()))
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
enc.wf(strconv.FormatInt(rv.Int(), 10))
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
enc.wf(strconv.FormatUint(rv.Uint(), 10))
case reflect.Float32:
f := rv.Float()
if math.IsNaN(f) {
enc.wf("nan")
} else if math.IsInf(f, 0) {
enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)])
} else {
enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 32)))
}
case reflect.Float64:
f := rv.Float()
if math.IsNaN(f) {
enc.wf("nan")
} else if math.IsInf(f, 0) {
enc.wf("%cinf", map[bool]byte{true: '-', false: '+'}[math.Signbit(f)])
} else {
enc.wf(floatAddDecimal(strconv.FormatFloat(f, 'f', -1, 64)))
}
case reflect.Array, reflect.Slice:
enc.eArrayOrSliceElement(rv)
case reflect.Struct:
enc.eStruct(nil, rv, true)
case reflect.Map:
enc.eMap(nil, rv, true)
case reflect.Interface:
enc.eElement(rv.Elem())
default:
encPanic(fmt.Errorf("unexpected primitive type: %T", rv.Interface()))
}
}
// By the TOML spec, all floats must have a decimal with at least one number on
// either side.
func floatAddDecimal(fstr string) string {
if !strings.Contains(fstr, ".") {
return fstr + ".0"
}
return fstr
}
func (enc *Encoder) writeQuoted(s string) {
enc.wf("\"%s\"", quotedReplacer.Replace(s))
}
func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) {
length := rv.Len()
enc.wf("[")
for i := 0; i < length; i++ {
elem := rv.Index(i)
enc.eElement(elem)
if i != length-1 {
enc.wf(", ")
}
}
enc.wf("]")
}
func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) {
if len(key) == 0 {
encPanic(errNoKey)
}
for i := 0; i < rv.Len(); i++ {
trv := rv.Index(i)
if isNil(trv) {
continue
}
enc.newline()
enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll())
enc.newline()
enc.eMapOrStruct(key, trv, false)
}
}
func (enc *Encoder) eTable(key Key, rv reflect.Value) {
if len(key) == 1 {
// Output an extra newline between top-level tables.
// (The newline isn't written if nothing else has been written though.)
enc.newline()
}
if len(key) > 0 {
enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll())
enc.newline()
}
enc.eMapOrStruct(key, rv, false)
}
func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value, inline bool) {
switch rv := eindirect(rv); rv.Kind() {
case reflect.Map:
enc.eMap(key, rv, inline)
case reflect.Struct:
enc.eStruct(key, rv, inline)
default:
// Should never happen?
panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String())
}
}
func (enc *Encoder) eMap(key Key, rv reflect.Value, inline bool) {
rt := rv.Type()
if rt.Key().Kind() != reflect.String {
encPanic(errNonString)
}
// Sort keys so that we have deterministic output. And write keys directly
// underneath this key first, before writing sub-structs or sub-maps.
var mapKeysDirect, mapKeysSub []string
for _, mapKey := range rv.MapKeys() {
k := mapKey.String()
if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) {
mapKeysSub = append(mapKeysSub, k)
} else {
mapKeysDirect = append(mapKeysDirect, k)
}
}
var writeMapKeys = func(mapKeys []string, trailC bool) {
sort.Strings(mapKeys)
for i, mapKey := range mapKeys {
val := rv.MapIndex(reflect.ValueOf(mapKey))
if isNil(val) {
continue
}
if inline {
enc.writeKeyValue(Key{mapKey}, val, true)
if trailC || i != len(mapKeys)-1 {
enc.wf(", ")
}
} else {
enc.encode(key.add(mapKey), val)
}
}
}
if inline {
enc.wf("{")
}
writeMapKeys(mapKeysDirect, len(mapKeysSub) > 0)
writeMapKeys(mapKeysSub, false)
if inline {
enc.wf("}")
}
}
func (enc *Encoder) eStruct(key Key, rv reflect.Value, inline bool) {
// Write keys for fields directly under this key first, because if we write
// a field that creates a new table then all keys under it will be in that
// table (not the one we're writing here).
//
// Fields is a [][]int: for fieldsDirect this always has one entry (the
// struct index). For fieldsSub it contains two entries: the parent field
// index from tv, and the field indexes for the fields of the sub.
var (
rt = rv.Type()
fieldsDirect, fieldsSub [][]int
addFields func(rt reflect.Type, rv reflect.Value, start []int)
)
addFields = func(rt reflect.Type, rv reflect.Value, start []int) {
for i := 0; i < rt.NumField(); i++ {
f := rt.Field(i)
if f.PkgPath != "" && !f.Anonymous { /// Skip unexported fields.
continue
}
frv := rv.Field(i)
// Treat anonymous struct fields with tag names as though they are
// not anonymous, like encoding/json does.
//
// Non-struct anonymous fields use the normal encoding logic.
if f.Anonymous {
t := f.Type
switch t.Kind() {
case reflect.Struct:
if getOptions(f.Tag).name == "" {
addFields(t, frv, append(start, f.Index...))
continue
}
case reflect.Ptr:
if t.Elem().Kind() == reflect.Struct && getOptions(f.Tag).name == "" {
if !frv.IsNil() {
addFields(t.Elem(), frv.Elem(), append(start, f.Index...))
}
continue
}
}
}
if typeIsHash(tomlTypeOfGo(frv)) {
fieldsSub = append(fieldsSub, append(start, f.Index...))
} else {
fieldsDirect = append(fieldsDirect, append(start, f.Index...))
}
}
}
addFields(rt, rv, nil)
writeFields := func(fields [][]int) {
for _, fieldIndex := range fields {
fieldType := rt.FieldByIndex(fieldIndex)
fieldVal := rv.FieldByIndex(fieldIndex)
if isNil(fieldVal) { /// Don't write anything for nil fields.
continue
}
opts := getOptions(fieldType.Tag)
if opts.skip {
continue
}
keyName := fieldType.Name
if opts.name != "" {
keyName = opts.name
}
if opts.omitempty && isEmpty(fieldVal) {
continue
}
if opts.omitzero && isZero(fieldVal) {
continue
}
if inline {
enc.writeKeyValue(Key{keyName}, fieldVal, true)
if fieldIndex[0] != len(fields)-1 {
enc.wf(", ")
}
} else {
enc.encode(key.add(keyName), fieldVal)
}
}
}
if inline {
enc.wf("{")
}
writeFields(fieldsDirect)
writeFields(fieldsSub)
if inline {
enc.wf("}")
}
}
// tomlTypeName returns the TOML type name of the Go value's type. It is
// used to determine whether the types of array elements are mixed (which is
// forbidden). If the Go value is nil, then it is illegal for it to be an array
// element, and valueIsNil is returned as true.
// Returns the TOML type of a Go value. The type may be `nil`, which means
// no concrete TOML type could be found.
func tomlTypeOfGo(rv reflect.Value) tomlType {
if isNil(rv) || !rv.IsValid() {
return nil
}
switch rv.Kind() {
case reflect.Bool:
return tomlBool
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32,
reflect.Int64,
reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32,
reflect.Uint64:
return tomlInteger
case reflect.Float32, reflect.Float64:
return tomlFloat
case reflect.Array, reflect.Slice:
if typeEqual(tomlHash, tomlArrayType(rv)) {
return tomlArrayHash
}
return tomlArray
case reflect.Ptr, reflect.Interface:
return tomlTypeOfGo(rv.Elem())
case reflect.String:
return tomlString
case reflect.Map:
return tomlHash
case reflect.Struct:
switch rv.Interface().(type) {
case time.Time:
return tomlDatetime
case encoding.TextMarshaler:
return tomlString
default:
// Someone used a pointer receiver: we can make it work for pointer
// values.
if rv.CanAddr() {
_, ok := rv.Addr().Interface().(encoding.TextMarshaler)
if ok {
return tomlString
}
}
return tomlHash
}
default:
_, ok := rv.Interface().(encoding.TextMarshaler)
if ok {
return tomlString
}
encPanic(errors.New("unsupported type: " + rv.Kind().String()))
panic("") // Need *some* return value
}
}
// tomlArrayType returns the element type of a TOML array. The type returned
// may be nil if it cannot be determined (e.g., a nil slice or a zero length
// slice). This function may also panic if it finds a type that cannot be
// expressed in TOML (such as nil elements, heterogeneous arrays or directly
// nested arrays of tables).
func tomlArrayType(rv reflect.Value) tomlType {
if isNil(rv) || !rv.IsValid() || rv.Len() == 0 {
return nil
}
/// Don't allow nil.
rvlen := rv.Len()
for i := 1; i < rvlen; i++ {
if tomlTypeOfGo(rv.Index(i)) == nil {
encPanic(errArrayNilElement)
}
}
firstType := tomlTypeOfGo(rv.Index(0))
if firstType == nil {
encPanic(errArrayNilElement)
}
return firstType
}
type tagOptions struct {
skip bool // "-"
name string
omitempty bool
omitzero bool
}
func getOptions(tag reflect.StructTag) tagOptions {
t := tag.Get("toml")
if t == "-" {
return tagOptions{skip: true}
}
var opts tagOptions
parts := strings.Split(t, ",")
opts.name = parts[0]
for _, s := range parts[1:] {
switch s {
case "omitempty":
opts.omitempty = true
case "omitzero":
opts.omitzero = true
}
}
return opts
}
func isZero(rv reflect.Value) bool {
switch rv.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
return rv.Int() == 0
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
return rv.Uint() == 0
case reflect.Float32, reflect.Float64:
return rv.Float() == 0.0
}
return false
}
func isEmpty(rv reflect.Value) bool {
switch rv.Kind() {
case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
return rv.Len() == 0
case reflect.Bool:
return !rv.Bool()
}
return false
}
func (enc *Encoder) newline() {
if enc.hasWritten {
enc.wf("\n")
}
}
// Write a key/value pair:
//
// key = <any value>
//
// If inline is true it won't add a newline at the end.
func (enc *Encoder) writeKeyValue(key Key, val reflect.Value, inline bool) {
if len(key) == 0 {
encPanic(errNoKey)
}
enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1))
enc.eElement(val)
if !inline {
enc.newline()
}
}
func (enc *Encoder) wf(format string, v ...interface{}) {
if _, err := fmt.Fprintf(enc.w, format, v...); err != nil {
encPanic(err)
}
enc.hasWritten = true
}
func (enc *Encoder) indentStr(key Key) string {
return strings.Repeat(enc.Indent, len(key)-1)
}
func encPanic(err error) {
panic(tomlEncodeError{err})
}
func eindirect(v reflect.Value) reflect.Value {
switch v.Kind() {
case reflect.Ptr, reflect.Interface:
return eindirect(v.Elem())
default:
return v
}
}
func isNil(rv reflect.Value) bool {
switch rv.Kind() {
case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice:
return rv.IsNil()
default:
return false
}
}
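
A small usage sketch for the encoder above (hypothetical struct; the expected output is shown in the trailing comment):

```go
package main

import (
	"log"
	"os"

	"github.com/BurntSushi/toml"
)

type server struct {
	Addr    string `toml:"addr"`
	Workers int    `toml:"workers,omitzero"`
}

func main() {
	enc := toml.NewEncoder(os.Stdout)
	if err := enc.Encode(server{Addr: ":8000"}); err != nil {
		log.Fatal(err)
	}
	// Output (Workers is omitted because of omitzero):
	// addr = ":8000"
}
```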

3
vendor/github.com/BurntSushi/toml/go.mod generated vendored Normal file

@@ -0,0 +1,3 @@
module github.com/BurntSushi/toml
go 1.16

0
vendor/github.com/BurntSushi/toml/go.sum generated vendored Normal file

36
vendor/github.com/BurntSushi/toml/internal/tz.go generated vendored Normal file

@@ -0,0 +1,36 @@
package internal
import "time"
// Timezones used for local datetime, date, and time TOML types.
//
// The exact way times and dates without a timezone should be interpreted is not
// well-defined in the TOML specification and left to the implementation. These
// default to the current local timezone offset of the computer, but this can be
// changed by changing these variables before decoding.
//
// TODO:
// Ideally we'd like to offer people the ability to configure the used timezone
// by setting Decoder.Timezone and Encoder.Timezone; however, this is a bit
// tricky: the reason we use three different variables for this is to support
// round-tripping; without these specific TZ names we wouldn't know which
// format to use.
//
// There isn't a good way to encode this right now though, and passing this sort
// of information also ties in to various related issues such as string format
// encoding, encoding of comments, etc.
//
// So, for the time being, just put this in internal until we can write a good
// comprehensive API for doing all of this.
//
// The reason they're exported is because they're referred to from e.g.
// internal/tag.
//
// Note that this behaviour is valid according to the TOML spec as the exact
// behaviour is left up to implementations.
var (
localOffset = func() int { _, o := time.Now().Zone(); return o }()
LocalDatetime = time.FixedZone("datetime-local", localOffset)
LocalDate = time.FixedZone("date-local", localOffset)
LocalTime = time.FixedZone("time-local", localOffset)
)

1225
vendor/github.com/BurntSushi/toml/lex.go generated vendored Normal file

File diff suppressed because it is too large Load diff

739
vendor/github.com/BurntSushi/toml/parse.go generated vendored Normal file
View file

@ -0,0 +1,739 @@
package toml
import (
"errors"
"fmt"
"strconv"
"strings"
"time"
"unicode/utf8"
"github.com/BurntSushi/toml/internal"
)
type parser struct {
mapping map[string]interface{}
types map[string]tomlType
lx *lexer
ordered []Key // List of keys in the order that they appear in the TOML data.
context Key // Full key for the current hash in scope.
currentKey string // Base key name for everything except hashes.
approxLine int // Rough approximation of line number
implicits map[string]bool // Record implied keys (e.g. 'key.group.names').
}
// ParseError is used when a file can't be parsed: for example invalid integer
// literals, duplicate keys, etc.
type ParseError struct {
Message string
Line int
LastKey string
}
func (pe ParseError) Error() string {
return fmt.Sprintf("Near line %d (last key parsed '%s'): %s",
pe.Line, pe.LastKey, pe.Message)
}
func parse(data string) (p *parser, err error) {
defer func() {
if r := recover(); r != nil {
var ok bool
if err, ok = r.(ParseError); ok {
return
}
panic(r)
}
}()
// Read over BOM; do this here as the lexer calls utf8.DecodeRuneInString()
// which mangles stuff.
if strings.HasPrefix(data, "\xff\xfe") || strings.HasPrefix(data, "\xfe\xff") {
data = data[2:]
}
// Examine first few bytes for NULL bytes; this probably means it's a UTF-16
// file (second byte in surrogate pair being NULL). Again, do this here to
// avoid having to deal with UTF-8/16 stuff in the lexer.
ex := 6
if len(data) < 6 {
ex = len(data)
}
if strings.ContainsRune(data[:ex], 0) {
return nil, errors.New("files cannot contain NULL bytes; probably using UTF-16; TOML files must be UTF-8")
}
p = &parser{
mapping: make(map[string]interface{}),
types: make(map[string]tomlType),
lx: lex(data),
ordered: make([]Key, 0),
implicits: make(map[string]bool),
}
for {
item := p.next()
if item.typ == itemEOF {
break
}
p.topLevel(item)
}
return p, nil
}
func (p *parser) panicf(format string, v ...interface{}) {
msg := fmt.Sprintf(format, v...)
panic(ParseError{
Message: msg,
Line: p.approxLine,
LastKey: p.current(),
})
}
func (p *parser) next() item {
it := p.lx.nextItem()
//fmt.Printf("ITEM %-18s line %-3d │ %q\n", it.typ, it.line, it.val)
if it.typ == itemError {
p.panicf("%s", it.val)
}
return it
}
func (p *parser) bug(format string, v ...interface{}) {
panic(fmt.Sprintf("BUG: "+format+"\n\n", v...))
}
func (p *parser) expect(typ itemType) item {
it := p.next()
p.assertEqual(typ, it.typ)
return it
}
func (p *parser) assertEqual(expected, got itemType) {
if expected != got {
p.bug("Expected '%s' but got '%s'.", expected, got)
}
}
func (p *parser) topLevel(item item) {
switch item.typ {
case itemCommentStart: // # ..
p.approxLine = item.line
p.expect(itemText)
case itemTableStart: // [ .. ]
name := p.next()
p.approxLine = name.line
var key Key
for ; name.typ != itemTableEnd && name.typ != itemEOF; name = p.next() {
key = append(key, p.keyString(name))
}
p.assertEqual(itemTableEnd, name.typ)
p.addContext(key, false)
p.setType("", tomlHash)
p.ordered = append(p.ordered, key)
case itemArrayTableStart: // [[ .. ]]
name := p.next()
p.approxLine = name.line
var key Key
for ; name.typ != itemArrayTableEnd && name.typ != itemEOF; name = p.next() {
key = append(key, p.keyString(name))
}
p.assertEqual(itemArrayTableEnd, name.typ)
p.addContext(key, true)
p.setType("", tomlArrayHash)
p.ordered = append(p.ordered, key)
case itemKeyStart: // key = ..
outerContext := p.context
/// Read all the key parts (e.g. 'a' and 'b' in 'a.b')
k := p.next()
p.approxLine = k.line
var key Key
for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() {
key = append(key, p.keyString(k))
}
p.assertEqual(itemKeyEnd, k.typ)
/// The current key is the last part.
p.currentKey = key[len(key)-1]
/// All the other parts (if any) are the context; need to set each part
/// as implicit.
context := key[:len(key)-1]
for i := range context {
p.addImplicitContext(append(p.context, context[i:i+1]...))
}
/// Set value.
val, typ := p.value(p.next(), false)
p.set(p.currentKey, val, typ)
p.ordered = append(p.ordered, p.context.add(p.currentKey))
/// Remove the context we added (preserving any context from [tbl] lines).
p.context = outerContext
p.currentKey = ""
default:
p.bug("Unexpected type at top level: %s", item.typ)
}
}
// Gets a string for a key (or part of a key in a table name).
func (p *parser) keyString(it item) string {
switch it.typ {
case itemText:
return it.val
case itemString, itemMultilineString,
itemRawString, itemRawMultilineString:
s, _ := p.value(it, false)
return s.(string)
default:
p.bug("Unexpected key type: %s", it.typ)
}
panic("unreachable")
}
var datetimeRepl = strings.NewReplacer(
"z", "Z",
"t", "T",
" ", "T")
// value translates an expected value from the lexer into a Go value wrapped
// as an empty interface.
func (p *parser) value(it item, parentIsArray bool) (interface{}, tomlType) {
switch it.typ {
case itemString:
return p.replaceEscapes(it.val), p.typeOfPrimitive(it)
case itemMultilineString:
return p.replaceEscapes(stripFirstNewline(stripEscapedNewlines(it.val))), p.typeOfPrimitive(it)
case itemRawString:
return it.val, p.typeOfPrimitive(it)
case itemRawMultilineString:
return stripFirstNewline(it.val), p.typeOfPrimitive(it)
case itemInteger:
return p.valueInteger(it)
case itemFloat:
return p.valueFloat(it)
case itemBool:
switch it.val {
case "true":
return true, p.typeOfPrimitive(it)
case "false":
return false, p.typeOfPrimitive(it)
default:
p.bug("Expected boolean value, but got '%s'.", it.val)
}
case itemDatetime:
return p.valueDatetime(it)
case itemArray:
return p.valueArray(it)
case itemInlineTableStart:
return p.valueInlineTable(it, parentIsArray)
default:
p.bug("Unexpected value type: %s", it.typ)
}
panic("unreachable")
}
func (p *parser) valueInteger(it item) (interface{}, tomlType) {
if !numUnderscoresOK(it.val) {
p.panicf("Invalid integer %q: underscores must be surrounded by digits", it.val)
}
if numHasLeadingZero(it.val) {
p.panicf("Invalid integer %q: cannot have leading zeroes", it.val)
}
num, err := strconv.ParseInt(it.val, 0, 64)
if err != nil {
// Distinguish integer values. Normally, it'd be a bug if the lexer
// provides an invalid integer, but it's possible that the number is
// out of range of valid values (which the lexer cannot determine).
// So mark the former as a bug but the latter as a legitimate user
// error.
if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange {
p.panicf("Integer '%s' is out of the range of 64-bit signed integers.", it.val)
} else {
p.bug("Expected integer value, but got '%s'.", it.val)
}
}
return num, p.typeOfPrimitive(it)
}
func (p *parser) valueFloat(it item) (interface{}, tomlType) {
parts := strings.FieldsFunc(it.val, func(r rune) bool {
switch r {
case '.', 'e', 'E':
return true
}
return false
})
for _, part := range parts {
if !numUnderscoresOK(part) {
p.panicf("Invalid float %q: underscores must be surrounded by digits", it.val)
}
}
if len(parts) > 0 && numHasLeadingZero(parts[0]) {
p.panicf("Invalid float %q: cannot have leading zeroes", it.val)
}
if !numPeriodsOK(it.val) {
// As a special case, numbers like '123.' or '1.e2',
// which are valid as far as Go/strconv are concerned,
// must be rejected because TOML says that a fractional
// part consists of '.' followed by 1+ digits.
p.panicf("Invalid float %q: '.' must be followed by one or more digits", it.val)
}
val := strings.Replace(it.val, "_", "", -1)
if val == "+nan" || val == "-nan" { // Go doesn't support this, but TOML spec does.
val = "nan"
}
num, err := strconv.ParseFloat(val, 64)
if err != nil {
if e, ok := err.(*strconv.NumError); ok && e.Err == strconv.ErrRange {
p.panicf("Float '%s' is out of the range of 64-bit IEEE-754 floating-point numbers.", it.val)
} else {
p.panicf("Invalid float value: %q", it.val)
}
}
return num, p.typeOfPrimitive(it)
}
var dtTypes = []struct {
fmt string
zone *time.Location
}{
{time.RFC3339Nano, time.Local},
{"2006-01-02T15:04:05.999999999", internal.LocalDatetime},
{"2006-01-02", internal.LocalDate},
{"15:04:05.999999999", internal.LocalTime},
}
func (p *parser) valueDatetime(it item) (interface{}, tomlType) {
it.val = datetimeRepl.Replace(it.val)
var (
t time.Time
ok bool
err error
)
for _, dt := range dtTypes {
t, err = time.ParseInLocation(dt.fmt, it.val, dt.zone)
if err == nil {
ok = true
break
}
}
if !ok {
p.panicf("Invalid TOML Datetime: %q.", it.val)
}
return t, p.typeOfPrimitive(it)
}
func (p *parser) valueArray(it item) (interface{}, tomlType) {
p.setType(p.currentKey, tomlArray)
// p.setType(p.currentKey, typ)
var (
array []interface{}
types []tomlType
)
for it = p.next(); it.typ != itemArrayEnd; it = p.next() {
if it.typ == itemCommentStart {
p.expect(itemText)
continue
}
val, typ := p.value(it, true)
array = append(array, val)
types = append(types, typ)
}
return array, tomlArray
}
func (p *parser) valueInlineTable(it item, parentIsArray bool) (interface{}, tomlType) {
var (
hash = make(map[string]interface{})
outerContext = p.context
outerKey = p.currentKey
)
p.context = append(p.context, p.currentKey)
prevContext := p.context
p.currentKey = ""
p.addImplicit(p.context)
p.addContext(p.context, parentIsArray)
/// Loop over all table key/value pairs.
for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() {
if it.typ == itemCommentStart {
p.expect(itemText)
continue
}
/// Read all key parts.
k := p.next()
p.approxLine = k.line
var key Key
for ; k.typ != itemKeyEnd && k.typ != itemEOF; k = p.next() {
key = append(key, p.keyString(k))
}
p.assertEqual(itemKeyEnd, k.typ)
/// The current key is the last part.
p.currentKey = key[len(key)-1]
/// All the other parts (if any) are the context; need to set each part
/// as implicit.
context := key[:len(key)-1]
for i := range context {
p.addImplicitContext(append(p.context, context[i:i+1]...))
}
/// Set the value.
val, typ := p.value(p.next(), false)
p.set(p.currentKey, val, typ)
p.ordered = append(p.ordered, p.context.add(p.currentKey))
hash[p.currentKey] = val
/// Restore context.
p.context = prevContext
}
p.context = outerContext
p.currentKey = outerKey
return hash, tomlHash
}
// numHasLeadingZero checks if this number has leading zeroes, allowing for '0',
// +/- signs, and base prefixes.
func numHasLeadingZero(s string) bool {
if len(s) > 1 && s[0] == '0' && isDigit(rune(s[1])) { // >1 to allow "0" and isDigit to allow 0x
return true
}
if len(s) > 2 && (s[0] == '-' || s[0] == '+') && s[1] == '0' {
return true
}
return false
}
// numUnderscoresOK checks whether each underscore in s is surrounded by
// characters that are not underscores.
func numUnderscoresOK(s string) bool {
switch s {
case "nan", "+nan", "-nan", "inf", "-inf", "+inf":
return true
}
accept := false
for _, r := range s {
if r == '_' {
if !accept {
return false
}
}
// isHexadecimal is a superset of all the permissible characters
// surrounding an underscore.
accept = isHexadecimal(r)
}
return accept
}
// numPeriodsOK checks whether every period in s is followed by a digit.
func numPeriodsOK(s string) bool {
period := false
for _, r := range s {
if period && !isDigit(r) {
return false
}
period = r == '.'
}
return !period
}
// Set the current context of the parser, where the context is either a hash or
// an array of hashes, depending on the value of the `array` parameter.
//
// Establishing the context also makes sure that the key isn't a duplicate, and
// will create implicit hashes automatically.
func (p *parser) addContext(key Key, array bool) {
var ok bool
// Always start at the top level and drill down for our context.
hashContext := p.mapping
keyContext := make(Key, 0)
// We only need implicit hashes for key[0:-1]
for _, k := range key[0 : len(key)-1] {
_, ok = hashContext[k]
keyContext = append(keyContext, k)
// No key? Make an implicit hash and move on.
if !ok {
p.addImplicit(keyContext)
hashContext[k] = make(map[string]interface{})
}
// If the hash context is actually an array of tables, then set
// the hash context to the last element in that array.
//
// Otherwise, it better be a table, since this MUST be a key group (by
// virtue of it not being the last element in a key).
switch t := hashContext[k].(type) {
case []map[string]interface{}:
hashContext = t[len(t)-1]
case map[string]interface{}:
hashContext = t
default:
p.panicf("Key '%s' was already created as a hash.", keyContext)
}
}
p.context = keyContext
if array {
// If this is the first element for this array, then allocate a new
// list of tables for it.
k := key[len(key)-1]
if _, ok := hashContext[k]; !ok {
hashContext[k] = make([]map[string]interface{}, 0, 4)
}
// Add a new table. But make sure the key hasn't already been used
// for something else.
if hash, ok := hashContext[k].([]map[string]interface{}); ok {
hashContext[k] = append(hash, make(map[string]interface{}))
} else {
p.panicf("Key '%s' was already created and cannot be used as an array.", keyContext)
}
} else {
p.setValue(key[len(key)-1], make(map[string]interface{}))
}
p.context = append(p.context, key[len(key)-1])
}
// set calls setValue and setType.
func (p *parser) set(key string, val interface{}, typ tomlType) {
p.setValue(p.currentKey, val)
p.setType(p.currentKey, typ)
}
// setValue sets the given key to the given value in the current context.
// It will make sure that the key hasn't already been defined, accounting for
// implicit key groups.
func (p *parser) setValue(key string, value interface{}) {
var (
tmpHash interface{}
ok bool
hash = p.mapping
keyContext Key
)
for _, k := range p.context {
keyContext = append(keyContext, k)
if tmpHash, ok = hash[k]; !ok {
p.bug("Context for key '%s' has not been established.", keyContext)
}
switch t := tmpHash.(type) {
case []map[string]interface{}:
// The context is a table of hashes. Pick the most recent table
// defined as the current hash.
hash = t[len(t)-1]
case map[string]interface{}:
hash = t
default:
p.panicf("Key '%s' has already been defined.", keyContext)
}
}
keyContext = append(keyContext, key)
if _, ok := hash[key]; ok {
// Normally redefining keys isn't allowed, but the key could have been
// defined implicitly and it's allowed to be redefined concretely. (See
// the `valid/implicit-and-explicit-after.toml` in toml-test)
//
// But we have to make sure to stop marking it as an implicit. (So that
// another redefinition provokes an error.)
//
// Note that since it has already been defined (as a hash), we don't
// want to overwrite it. So our business is done.
if p.isArray(keyContext) {
p.removeImplicit(keyContext)
hash[key] = value
return
}
if p.isImplicit(keyContext) {
p.removeImplicit(keyContext)
return
}
// Otherwise, we have a concrete key trying to override a previous
// key, which is *always* wrong.
p.panicf("Key '%s' has already been defined.", keyContext)
}
hash[key] = value
}
// setType sets the type of a particular value at a given key.
// It should be called immediately AFTER setValue.
//
// Note that if `key` is empty, then the type given will be applied to the
// current context (which is either a table or an array of tables).
func (p *parser) setType(key string, typ tomlType) {
keyContext := make(Key, 0, len(p.context)+1)
for _, k := range p.context {
keyContext = append(keyContext, k)
}
if len(key) > 0 { // allow type setting for hashes
keyContext = append(keyContext, key)
}
p.types[keyContext.String()] = typ
}
// Implicit keys need to be created when tables are implied in "a.b.c.d = 1" and
// "[a.b.c]" (the "a", "b", and "c" hashes are never created explicitly).
func (p *parser) addImplicit(key Key) { p.implicits[key.String()] = true }
func (p *parser) removeImplicit(key Key) { p.implicits[key.String()] = false }
func (p *parser) isImplicit(key Key) bool { return p.implicits[key.String()] }
func (p *parser) isArray(key Key) bool { return p.types[key.String()] == tomlArray }
func (p *parser) addImplicitContext(key Key) {
p.addImplicit(key)
p.addContext(key, false)
}
// current returns the full key name of the current context.
func (p *parser) current() string {
if len(p.currentKey) == 0 {
return p.context.String()
}
if len(p.context) == 0 {
return p.currentKey
}
return fmt.Sprintf("%s.%s", p.context, p.currentKey)
}
func stripFirstNewline(s string) string {
if len(s) > 0 && s[0] == '\n' {
return s[1:]
}
if len(s) > 1 && s[0] == '\r' && s[1] == '\n' {
return s[2:]
}
return s
}
// Remove newlines inside triple-quoted strings if a line ends with "\".
func stripEscapedNewlines(s string) string {
split := strings.Split(s, "\n")
if len(split) < 1 {
return s
}
escNL := false // Keep track of whether the last non-blank line was escaped.
for i, line := range split {
line = strings.TrimRight(line, " \t\r")
if len(line) == 0 || line[len(line)-1] != '\\' {
split[i] = strings.TrimRight(split[i], "\r")
if !escNL && i != len(split)-1 {
split[i] += "\n"
}
continue
}
escBS := true
for j := len(line) - 1; j >= 0 && line[j] == '\\'; j-- {
escBS = !escBS
}
if escNL {
line = strings.TrimLeft(line, " \t\r")
}
escNL = !escBS
if escBS {
split[i] += "\n"
continue
}
split[i] = line[:len(line)-1] // Remove \
if len(split)-1 > i {
split[i+1] = strings.TrimLeft(split[i+1], " \t\r")
}
}
return strings.Join(split, "")
}
func (p *parser) replaceEscapes(str string) string {
var replaced []rune
s := []byte(str)
r := 0
for r < len(s) {
if s[r] != '\\' {
c, size := utf8.DecodeRune(s[r:])
r += size
replaced = append(replaced, c)
continue
}
r += 1
if r >= len(s) {
p.bug("Escape sequence at end of string.")
return ""
}
switch s[r] {
default:
p.bug("Expected valid escape code after \\, but got %q.", s[r])
return ""
case ' ', '\t':
p.panicf("invalid escape: '\\%c'", s[r])
return ""
case 'b':
replaced = append(replaced, rune(0x0008))
r += 1
case 't':
replaced = append(replaced, rune(0x0009))
r += 1
case 'n':
replaced = append(replaced, rune(0x000A))
r += 1
case 'f':
replaced = append(replaced, rune(0x000C))
r += 1
case 'r':
replaced = append(replaced, rune(0x000D))
r += 1
case '"':
replaced = append(replaced, rune(0x0022))
r += 1
case '\\':
replaced = append(replaced, rune(0x005C))
r += 1
case 'u':
// At this point, we know we have a Unicode escape of the form
// `uXXXX` at [r, r+5). (Because the lexer guarantees this
// for us.)
escaped := p.asciiEscapeToUnicode(s[r+1 : r+5])
replaced = append(replaced, escaped)
r += 5
case 'U':
// At this point, we know we have a Unicode escape of the form
// `UXXXXXXXX` at [r, r+9). (Because the lexer guarantees this
// for us.)
escaped := p.asciiEscapeToUnicode(s[r+1 : r+9])
replaced = append(replaced, escaped)
r += 9
}
}
return string(replaced)
}
func (p *parser) asciiEscapeToUnicode(bs []byte) rune {
s := string(bs)
hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32)
if err != nil {
p.bug("Could not parse '%s' as a hexadecimal number, but the "+
"lexer claims it's OK: %s", s, err)
}
if !utf8.ValidRune(rune(hex)) {
p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s)
}
return rune(hex)
}

70
vendor/github.com/BurntSushi/toml/type_check.go generated vendored Normal file
View file

@ -0,0 +1,70 @@
package toml
// tomlType represents any Go type that corresponds to a TOML type.
// While the first draft of the TOML spec has a simplistic type system that
// probably doesn't need this level of sophistication, we seem to be militating
// toward adding real composite types.
type tomlType interface {
typeString() string
}
// typeEqual accepts any two types and returns true if they are equal.
func typeEqual(t1, t2 tomlType) bool {
if t1 == nil || t2 == nil {
return false
}
return t1.typeString() == t2.typeString()
}
func typeIsHash(t tomlType) bool {
return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash)
}
type tomlBaseType string
func (btype tomlBaseType) typeString() string {
return string(btype)
}
func (btype tomlBaseType) String() string {
return btype.typeString()
}
var (
tomlInteger tomlBaseType = "Integer"
tomlFloat tomlBaseType = "Float"
tomlDatetime tomlBaseType = "Datetime"
tomlString tomlBaseType = "String"
tomlBool tomlBaseType = "Bool"
tomlArray tomlBaseType = "Array"
tomlHash tomlBaseType = "Hash"
tomlArrayHash tomlBaseType = "ArrayHash"
)
// typeOfPrimitive returns a tomlType of any primitive value in TOML.
// Primitive values are: Integer, Float, Datetime, String and Bool.
//
// Passing a lexer item other than the following will cause a BUG message
// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime.
func (p *parser) typeOfPrimitive(lexItem item) tomlType {
switch lexItem.typ {
case itemInteger:
return tomlInteger
case itemFloat:
return tomlFloat
case itemDatetime:
return tomlDatetime
case itemString:
return tomlString
case itemMultilineString:
return tomlString
case itemRawString:
return tomlString
case itemRawMultilineString:
return tomlString
case itemBool:
return tomlBool
}
p.bug("Cannot infer primitive type of lex item '%s'.", lexItem)
panic("unreachable")
}

242
vendor/github.com/BurntSushi/toml/type_fields.go generated vendored Normal file
View file

@ -0,0 +1,242 @@
package toml
// Struct field handling is adapted from code in encoding/json:
//
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the Go distribution.
import (
"reflect"
"sort"
"sync"
)
// A field represents a single field found in a struct.
type field struct {
name string // the name of the field (`toml` tag included)
tag bool // whether field has a `toml` tag
index []int // represents the depth of an anonymous field
typ reflect.Type // the type of the field
}
// byName sorts field by name, breaking ties with depth,
// then breaking ties with "name came from toml tag", then
// breaking ties with index sequence.
type byName []field
func (x byName) Len() int { return len(x) }
func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x byName) Less(i, j int) bool {
if x[i].name != x[j].name {
return x[i].name < x[j].name
}
if len(x[i].index) != len(x[j].index) {
return len(x[i].index) < len(x[j].index)
}
if x[i].tag != x[j].tag {
return x[i].tag
}
return byIndex(x).Less(i, j)
}
// byIndex sorts field by index sequence.
type byIndex []field
func (x byIndex) Len() int { return len(x) }
func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] }
func (x byIndex) Less(i, j int) bool {
for k, xik := range x[i].index {
if k >= len(x[j].index) {
return false
}
if xik != x[j].index[k] {
return xik < x[j].index[k]
}
}
return len(x[i].index) < len(x[j].index)
}
// typeFields returns a list of fields that TOML should recognize for the given
// type. The algorithm is breadth-first search over the set of structs to
// include - the top struct and then any reachable anonymous structs.
func typeFields(t reflect.Type) []field {
// Anonymous fields to explore at the current level and the next.
current := []field{}
next := []field{{typ: t}}
// Count of queued names for current level and the next.
count := map[reflect.Type]int{}
nextCount := map[reflect.Type]int{}
// Types already visited at an earlier level.
visited := map[reflect.Type]bool{}
// Fields found.
var fields []field
for len(next) > 0 {
current, next = next, current[:0]
count, nextCount = nextCount, map[reflect.Type]int{}
for _, f := range current {
if visited[f.typ] {
continue
}
visited[f.typ] = true
// Scan f.typ for fields to include.
for i := 0; i < f.typ.NumField(); i++ {
sf := f.typ.Field(i)
if sf.PkgPath != "" && !sf.Anonymous { // unexported
continue
}
opts := getOptions(sf.Tag)
if opts.skip {
continue
}
index := make([]int, len(f.index)+1)
copy(index, f.index)
index[len(f.index)] = i
ft := sf.Type
if ft.Name() == "" && ft.Kind() == reflect.Ptr {
// Follow pointer.
ft = ft.Elem()
}
// Record found field and index sequence.
if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct {
tagged := opts.name != ""
name := opts.name
if name == "" {
name = sf.Name
}
fields = append(fields, field{name, tagged, index, ft})
if count[f.typ] > 1 {
// If there were multiple instances, add a second,
// so that the annihilation code will see a duplicate.
// It only cares about the distinction between 1 or 2,
// so don't bother generating any more copies.
fields = append(fields, fields[len(fields)-1])
}
continue
}
// Record new anonymous struct to explore in next round.
nextCount[ft]++
if nextCount[ft] == 1 {
f := field{name: ft.Name(), index: index, typ: ft}
next = append(next, f)
}
}
}
}
sort.Sort(byName(fields))
// Delete all fields that are hidden by the Go rules for embedded fields,
// except that fields with TOML tags are promoted.
// The fields are sorted in primary order of name, secondary order
// of field index length. Loop over names; for each name, delete
// hidden fields by choosing the one dominant field that survives.
out := fields[:0]
for advance, i := 0, 0; i < len(fields); i += advance {
// One iteration per name.
// Find the sequence of fields with the name of this first field.
fi := fields[i]
name := fi.name
for advance = 1; i+advance < len(fields); advance++ {
fj := fields[i+advance]
if fj.name != name {
break
}
}
if advance == 1 { // Only one field with this name
out = append(out, fi)
continue
}
dominant, ok := dominantField(fields[i : i+advance])
if ok {
out = append(out, dominant)
}
}
fields = out
sort.Sort(byIndex(fields))
return fields
}
// dominantField looks through the fields, all of which are known to
// have the same name, to find the single field that dominates the
// others using Go's embedding rules, modified by the presence of
// TOML tags. If there are multiple top-level fields, the boolean
// will be false: This condition is an error in Go and we skip all
// the fields.
func dominantField(fields []field) (field, bool) {
// The fields are sorted in increasing index-length order. The winner
// must therefore be one with the shortest index length. Drop all
// longer entries, which is easy: just truncate the slice.
length := len(fields[0].index)
tagged := -1 // Index of first tagged field.
for i, f := range fields {
if len(f.index) > length {
fields = fields[:i]
break
}
if f.tag {
if tagged >= 0 {
// Multiple tagged fields at the same level: conflict.
// Return no field.
return field{}, false
}
tagged = i
}
}
if tagged >= 0 {
return fields[tagged], true
}
// All remaining fields have the same length. If there's more than one,
// we have a conflict (two fields named "X" at the same level) and we
// return no field.
if len(fields) > 1 {
return field{}, false
}
return fields[0], true
}
var fieldCache struct {
sync.RWMutex
m map[reflect.Type][]field
}
// cachedTypeFields is like typeFields but uses a cache to avoid repeated work.
func cachedTypeFields(t reflect.Type) []field {
fieldCache.RLock()
f := fieldCache.m[t]
fieldCache.RUnlock()
if f != nil {
return f
}
// Compute fields without lock.
// Might duplicate effort but won't hold other computations back.
f = typeFields(t)
if f == nil {
f = []field{}
}
fieldCache.Lock()
if fieldCache.m == nil {
fieldCache.m = map[reflect.Type][]field{}
}
fieldCache.m[t] = f
fieldCache.Unlock()
return f
}

15
vendor/github.com/Djarvur/go-err113/.gitignore generated vendored Normal file
View file

@ -0,0 +1,15 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/

150
vendor/github.com/Djarvur/go-err113/.golangci.yml generated vendored Normal file
View file

@ -0,0 +1,150 @@
# This file contains all available configuration options
# with their default values.
# options for analysis running
run:
# default concurrency is the number of available CPUs
concurrency: 4
# timeout for analysis, e.g. 30s, 5m, default is 1m
deadline: 15m
# exit code when at least one issue was found, default is 1
issues-exit-code: 1
# include test files or not, default is true
tests: false
# list of build tags, all linters use it. Default is empty list.
#build-tags:
# - mytag
# which dirs to skip: they won't be analyzed;
# can use regexp here: generated.*, regexp is applied on full path;
# default value is empty list, but next dirs are always skipped independently
# from this option's value:
# vendor$, third_party$, testdata$, examples$, Godeps$, builtin$
skip-dirs:
- /gen$
# which files to skip: they will be analyzed, but issues from them
# won't be reported. Default value is empty list, but there is
# no need to include all autogenerated files, we confidently recognize
# autogenerated files. If it's not please let us know.
skip-files:
- ".*\\.my\\.go$"
- lib/bad.go
- ".*\\.template\\.go$"
# output configuration options
output:
# colored-line-number|line-number|json|tab|checkstyle, default is "colored-line-number"
format: colored-line-number
# print lines of code with issue, default is true
print-issued-lines: true
# print linter name in the end of issue text, default is true
print-linter-name: true
# all available settings of specific linters
linters-settings:
errcheck:
# report about not checking of errors in type assertions: `a := b.(MyStruct)`;
# default is false: such cases aren't reported by default.
check-type-assertions: false
# report about assignment of errors to blank identifier: `num, _ := strconv.Atoi(numStr)`;
# default is false: such cases aren't reported by default.
check-blank: false
govet:
# report about shadowed variables
check-shadowing: true
# Obtain type information from installed (to $GOPATH/pkg) package files:
# golangci-lint will execute `go install -i` and `go test -i` for analyzed packages
# before analyzing them.
# By default this option is disabled and govet gets type information by loader from source code.
# Loading from source code is slow, but it's done only once for all linters.
# Go-installing of packages first time is much slower than loading them from source code,
# therefore this option is disabled by default.
# But repeated installation is fast in go >= 1.10 because of build caching.
# Enable this option only if all conditions are met:
# 1. you use only "fast" linters (--fast e.g.): no program loading occurs
# 2. you use go >= 1.10
# 3. you do repeated runs (false for CI) or cache $GOPATH/pkg or `go env GOCACHE` dir in CI.
use-installed-packages: false
golint:
# minimal confidence for issues, default is 0.8
min-confidence: 0.8
gofmt:
# simplify code: gofmt with `-s` option, true by default
simplify: true
gocyclo:
# minimal code complexity to report, 30 by default (but we recommend 10-20)
min-complexity: 10
maligned:
# print struct with more effective memory layout or not, false by default
suggest-new: true
dupl:
# tokens count to trigger issue, 150 by default
threshold: 100
goconst:
# minimal length of string constant, 3 by default
min-len: 3
# minimal occurrences count to trigger, 3 by default
min-occurrences: 3
depguard:
list-type: blacklist
include-go-root: false
packages:
- github.com/davecgh/go-spew/spew
linters:
#enable:
# - staticcheck
# - unused
# - gosimple
enable-all: true
disable:
- lll
disable-all: false
#presets:
# - bugs
# - unused
fast: false
issues:
# List of regexps of issue texts to exclude, empty list by default.
# But independently from this option we use default exclude patterns,
# it can be disabled by `exclude-use-default: false`. To list all
# excluded by default patterns execute `golangci-lint run --help`
exclude:
- "`parseTained` is unused"
- "`parseState` is unused"
# Independently from option `exclude` we use default exclude patterns,
# it can be disabled by this option. To list all
# excluded by default patterns execute `golangci-lint run --help`.
# Default value for this option is false.
exclude-use-default: false
# Maximum issues count per one linter. Set to 0 to disable. Default is 50.
max-per-linter: 0
# Maximum count of issues with the same text. Set to 0 to disable. Default is 3.
max-same: 0
# Show only new issues: if there are unstaged changes or untracked files,
# only those changes are analyzed, else only changes in HEAD~ are analyzed.
# It's a super-useful option for integration of golangci-lint into existing
# large codebase. It's not practical to fix all existing issues at the moment
# of integration: much better don't allow issues in new code.
# Default is false.
new: false
# Show only new issues created after git revision `REV`
#new-from-rev: REV
# Show only new issues created in git patch with set file path.
#new-from-patch: path/to/patch/file

24
vendor/github.com/Djarvur/go-err113/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,24 @@
language: go
go:
- "1.13"
- "1.14"
- tip
env:
- GO111MODULE=on
before_install:
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- go get golang.org/x/tools/cmd/cover
- go get golang.org/x/tools/cmd/goimports
- wget -O - -q https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh
script:
- test -z "$(goimports -d ./ 2>&1)"
- ./bin/golangci-lint run
- go test -v -race ./...
after_success:
- test "$TRAVIS_GO_VERSION" = "1.14" && goveralls -service=travis-ci

21
vendor/github.com/Djarvur/go-err113/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Djarvur
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

75
vendor/github.com/Djarvur/go-err113/README.adoc generated vendored Normal file
View file

@ -0,0 +1,75 @@
= err113 image:https://godoc.org/github.com/Djarvur/go-err113?status.svg["GoDoc",link="http://godoc.org/github.com/Djarvur/go-err113"] image:https://travis-ci.org/Djarvur/go-err113.svg["Build Status",link="https://travis-ci.org/Djarvur/go-err113"] image:https://coveralls.io/repos/Djarvur/go-err113/badge.svg?branch=master&service=github["Coverage Status",link="https://coveralls.io/github/Djarvur/go-err113?branch=master"]
Daniel Podolsky
:toc:
Golang linter to check error handling expressions
== Details
Starting from Go 1.13 the standard `error` type behaviour was changed: one `error` can be derived from another with the `fmt.Errorf()` function using the `%w` format specifier.
So an error hierarchy can be built for flexible and reliable error processing.
And to make this possible at least two simple rules should be followed:
1. `error` values should not be compared directly but with the `errors.Is()` function.
1. `error` values should not be created dynamically from scratch but by wrapping a static (package-level) error.
This linter checks the code for compliance with these two rules.
=== Reports
So, `err113` reports every `==` and `!=` comparison of variables with the exact `error` type, except comparisons to `nil` and `io.EOF`.
Also, any call to `errors.New()` or `fmt.Errorf()` is reported, except calls used to initialise package-level variables and `fmt.Errorf()` calls wrapping other errors.
Note: non-standard packages, like `github.com/pkg/errors`, are ignored completely.
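
For illustration only (this sketch is not part of the linter; `ErrNotFound`, `findUser`, and `lookup` are hypothetical names), here is the pattern `err113` reports and the pattern it prefers:

```
package example

import (
    "errors"
    "fmt"
)

// Preferred: a static (package-level) error; err113 does not report this.
var ErrNotFound = errors.New("user not found")

// findUser is a hypothetical helper used only for illustration.
func findUser(id string) error {
    // Preferred: wrap the static error instead of creating a new dynamic one.
    return fmt.Errorf("find user %q: %w", id, ErrNotFound)
}

func lookup(id string) bool {
    err := findUser(id)

    // Reported by err113: a direct comparison such as `err == ErrNotFound`,
    // or a dynamic error such as `errors.New("user not found")` inside a function.

    // Preferred: compare through the wrapping chain.
    return errors.Is(err, ErrNotFound)
}
```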
== Install
```
go get -u github.com/Djarvur/go-err113/cmd/err113
```
== Usage
Defined by link:https://pkg.go.dev/golang.org/x/tools/go/analysis/singlechecker[singlechecker] package.
```
err113: checks the error handling rules according to the Go 1.13 new error type
Usage: err113 [-flag] [package]
Flags:
-V print version and exit
-all
no effect (deprecated)
-c int
display offending line with this many lines of context (default -1)
-cpuprofile string
write CPU profile to this file
-debug string
debug flags, any subset of "fpstv"
-fix
apply all suggested fixes
-flags
print analyzer flags in JSON
-json
emit JSON output
-memprofile string
write memory profile to this file
-source
no effect (deprecated)
-tags string
no effect (deprecated)
-trace string
write trace log to this file
-v no effect (deprecated)
```
== Thanks
To link:https://github.com/quasilyte[Iskander (Alex) Sharipov] for the really useful advice.
To link:https://github.com/jackwhelpton[Jack Whelpton] for the bugfix provided.

123
vendor/github.com/Djarvur/go-err113/comparison.go generated vendored Normal file
View file

@ -0,0 +1,123 @@
package err113
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
)
func inspectComparision(pass *analysis.Pass, n ast.Node) bool { // nolint: unparam
// check whether the node is a binary expression
be, ok := n.(*ast.BinaryExpr)
if !ok {
return true
}
// check if it is a comparison operation
if be.Op != token.EQL && be.Op != token.NEQ {
return true
}
if !areBothErrors(be.X, be.Y, pass.TypesInfo) {
return true
}
oldExpr := render(pass.Fset, be)
negate := ""
if be.Op == token.NEQ {
negate = "!"
}
newExpr := fmt.Sprintf("%s%s.Is(%s, %s)", negate, "errors", rawString(be.X), rawString(be.Y))
pass.Report(
analysis.Diagnostic{
Pos: be.Pos(),
Message: fmt.Sprintf("do not compare errors directly %q, use %q instead", oldExpr, newExpr),
SuggestedFixes: []analysis.SuggestedFix{
{
Message: fmt.Sprintf("should replace %q with %q", oldExpr, newExpr),
TextEdits: []analysis.TextEdit{
{
Pos: be.Pos(),
End: be.End(),
NewText: []byte(newExpr),
},
},
},
},
},
)
return true
}
func isError(v ast.Expr, info *types.Info) bool {
if intf, ok := info.TypeOf(v).Underlying().(*types.Interface); ok {
return intf.NumMethods() == 1 && intf.Method(0).FullName() == "(error).Error"
}
return false
}
func isEOF(ex ast.Expr, info *types.Info) bool {
se, ok := ex.(*ast.SelectorExpr)
if !ok || se.Sel.Name != "EOF" {
return false
}
if ep, ok := asImportedName(se.X, info); !ok || ep != "io" {
return false
}
return true
}
func asImportedName(ex ast.Expr, info *types.Info) (string, bool) {
ei, ok := ex.(*ast.Ident)
if !ok {
return "", false
}
ep, ok := info.ObjectOf(ei).(*types.PkgName)
if !ok {
return "", false
}
return ep.Imported().Path(), true
}
func areBothErrors(x, y ast.Expr, typesInfo *types.Info) bool {
// check that both left and right hand side are not nil
if typesInfo.Types[x].IsNil() || typesInfo.Types[y].IsNil() {
return false
}
// check that both left and right hand side are not io.EOF
if isEOF(x, typesInfo) || isEOF(y, typesInfo) {
return false
}
// check that both left and right hand side are errors
if !isError(x, typesInfo) && !isError(y, typesInfo) {
return false
}
return true
}
func rawString(x ast.Expr) string {
switch t := x.(type) {
case *ast.Ident:
return t.Name
case *ast.SelectorExpr:
return fmt.Sprintf("%s.%s", rawString(t.X), t.Sel.Name)
case *ast.CallExpr:
return fmt.Sprintf("%s()", rawString(t.Fun))
}
return fmt.Sprintf("%s", x)
}

74
vendor/github.com/Djarvur/go-err113/definition.go generated vendored Normal file
View file

@ -0,0 +1,74 @@
package err113
import (
"go/ast"
"go/types"
"strings"
"golang.org/x/tools/go/analysis"
)
var methods2check = map[string]map[string]func(*ast.CallExpr, *types.Info) bool{ // nolint: gochecknoglobals
"errors": {"New": justTrue},
"fmt": {"Errorf": checkWrap},
}
func justTrue(*ast.CallExpr, *types.Info) bool {
return true
}
func checkWrap(ce *ast.CallExpr, info *types.Info) bool {
return !(len(ce.Args) > 0 && strings.Contains(toString(ce.Args[0], info), `%w`))
}
func inspectDefinition(pass *analysis.Pass, tlds map[*ast.CallExpr]struct{}, n ast.Node) bool { //nolint: unparam
// check whether the node is a call expression
ce, ok := n.(*ast.CallExpr)
if !ok {
return true
}
if _, ok = tlds[ce]; ok {
return true
}
fn, ok := ce.Fun.(*ast.SelectorExpr)
if !ok {
return true
}
fxName, ok := asImportedName(fn.X, pass.TypesInfo)
if !ok {
return true
}
methods, ok := methods2check[fxName]
if !ok {
return true
}
checkFunc, ok := methods[fn.Sel.Name]
if !ok {
return true
}
if !checkFunc(ce, pass.TypesInfo) {
return true
}
pass.Reportf(
ce.Pos(),
"do not define dynamic errors, use wrapped static errors instead: %q",
render(pass.Fset, ce),
)
return true
}
func toString(ex ast.Expr, info *types.Info) string {
if tv, ok := info.Types[ex]; ok && tv.Value != nil {
return tv.Value.ExactString()
}
return ""
}

90
vendor/github.com/Djarvur/go-err113/err113.go generated vendored Normal file
View file

@ -0,0 +1,90 @@
// Package err113 is a Golang linter to check the errors handling expressions
package err113
import (
"bytes"
"go/ast"
"go/printer"
"go/token"
"golang.org/x/tools/go/analysis"
)
// NewAnalyzer creates a new analysis.Analyzer instance tuned to run err113 checks.
func NewAnalyzer() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "err113",
Doc: "checks the error handling rules according to the Go 1.13 new error type",
Run: run,
}
}
func run(pass *analysis.Pass) (interface{}, error) {
for _, file := range pass.Files {
tlds := enumerateFileDecls(file)
ast.Inspect(
file,
func(n ast.Node) bool {
return inspectComparision(pass, n) &&
inspectDefinition(pass, tlds, n)
},
)
}
return nil, nil
}
// render returns the pretty-print of the given node.
func render(fset *token.FileSet, x interface{}) string {
var buf bytes.Buffer
if err := printer.Fprint(&buf, fset, x); err != nil {
panic(err)
}
return buf.String()
}
func enumerateFileDecls(f *ast.File) map[*ast.CallExpr]struct{} {
res := make(map[*ast.CallExpr]struct{})
var ces []*ast.CallExpr // nolint: prealloc
for _, d := range f.Decls {
ces = append(ces, enumerateDeclVars(d)...)
}
for _, ce := range ces {
res[ce] = struct{}{}
}
return res
}
func enumerateDeclVars(d ast.Decl) (res []*ast.CallExpr) {
td, ok := d.(*ast.GenDecl)
if !ok || td.Tok != token.VAR {
return nil
}
for _, s := range td.Specs {
res = append(res, enumerateSpecValues(s)...)
}
return res
}
func enumerateSpecValues(s ast.Spec) (res []*ast.CallExpr) {
vs, ok := s.(*ast.ValueSpec)
if !ok {
return nil
}
for _, v := range vs.Values {
if ce, ok := v.(*ast.CallExpr); ok {
res = append(res, ce)
}
}
return res
}

5
vendor/github.com/Djarvur/go-err113/go.mod generated vendored Normal file
View file

@ -0,0 +1,5 @@
module github.com/Djarvur/go-err113
go 1.13
require golang.org/x/tools v0.0.0-20200324003944-a576cf524670

20
vendor/github.com/Djarvur/go-err113/go.sum generated vendored Normal file
View file

@ -0,0 +1,20 @@
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/mod v0.2.0 h1:KU7oHjnv3XNWfa5COkzUifxZmxp1TyI7ImMXqFxLwvQ=
golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670 h1:fW7EP/GZqIvbHessHd1PLca+77TBOsRBqtaybMgXJq8=
golang.org/x/tools v0.0.0-20200324003944-a576cf524670/go.mod h1:Sl4aGygMT6LrqrWclx+PTx3U+LnKx/seiNR+3G19Ar8=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

29
vendor/github.com/Masterminds/semver/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,29 @@
language: go
go:
- 1.6.x
- 1.7.x
- 1.8.x
- 1.9.x
- 1.10.x
- 1.11.x
- 1.12.x
- tip
# Setting sudo access to false will let Travis CI use containers rather than
# VMs to run the tests. For more details see:
# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/
# - http://docs.travis-ci.com/user/workers/standard-infrastructure/
sudo: false
script:
- make setup
- make test
notifications:
webhooks:
urls:
- https://webhooks.gitter.im/e/06e3328629952dabe3e0
on_success: change # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: never # options: [always|never|change] default: always

109
vendor/github.com/Masterminds/semver/CHANGELOG.md generated vendored Normal file
View file

@ -0,0 +1,109 @@
# 1.5.0 (2019-09-11)
## Added
- #103: Add basic fuzzing for `NewVersion()` (thanks @jesse-c)
## Changed
- #82: Clarify wildcard meaning in range constraints and update tests for it (thanks @greysteil)
- #83: Clarify caret operator range for pre-1.0.0 dependencies (thanks @greysteil)
- #72: Adding docs comment pointing to vert for a cli
- #71: Update the docs on pre-release comparator handling
- #89: Test with new go versions (thanks @thedevsaddam)
- #87: Added $ to ValidPrerelease for better validation (thanks @jeremycarroll)
## Fixed
- #78: Fix unchecked error in example code (thanks @ravron)
- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
- #97: Fixed copyright file for proper display on GitHub
- #107: Fix handling prerelease when sorting alphanum and num
- #109: Fixed where Validate sometimes returns wrong message on error
# 1.4.2 (2018-04-10)
## Changed
- #72: Updated the docs to point to vert for a console application
- #71: Update the docs on pre-release comparator handling
## Fixed
- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
# 1.4.1 (2018-04-02)
## Fixed
- Fixed #64: Fix pre-release precedence issue (thanks @uudashr)
# 1.4.0 (2017-10-04)
## Changed
- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill)
# 1.3.1 (2017-07-10)
## Fixed
- Fixed #57: number comparisons in prerelease sometimes inaccurate
# 1.3.0 (2017-05-02)
## Added
- #45: Added json (un)marshaling support (thanks @mh-cbon)
- Stability marker. See https://masterminds.github.io/stability/
## Fixed
- #51: Fix handling of single digit tilde constraint (thanks @dgodd)
## Changed
- #55: The godoc icon moved from png to svg
# 1.2.3 (2017-04-03)
## Fixed
- #46: Fixed 0.x.x and 0.0.x in constraints being treated as *
# Release 1.2.2 (2016-12-13)
## Fixed
- #34: Fixed issue where hyphen range was not working with pre-release parsing.
# Release 1.2.1 (2016-11-28)
## Fixed
- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha"
properly.
# Release 1.2.0 (2016-11-04)
## Added
- #20: Added MustParse function for versions (thanks @adamreese)
- #15: Added increment methods on versions (thanks @mh-cbon)
## Fixed
- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and
might not satisfy the intended compatibility. The change here ignores pre-releases
on constraint checks (e.g., ~ or ^) when a pre-release is not part of the
constraint. For example, `^1.2.3` will ignore pre-releases while
`^1.2.3-alpha` will include them.
# Release 1.1.1 (2016-06-30)
## Changed
- Issue #9: Speed up version comparison performance (thanks @sdboyer)
- Issue #8: Added benchmarks (thanks @sdboyer)
- Updated Go Report Card URL to new location
- Updated Readme to add code snippet formatting (thanks @mh-cbon)
- Updating tagging to v[SemVer] structure for compatibility with other tools.
# Release 1.1.0 (2016-03-11)
- Issue #2: Implemented validation to provide reasons a versions failed a
constraint.
# Release 1.0.1 (2015-12-31)
- Fixed #1: * constraint failing on valid versions.
# Release 1.0.0 (2015-10-20)
- Initial release

19
vendor/github.com/Masterminds/semver/LICENSE.txt generated vendored Normal file
View file

@ -0,0 +1,19 @@
Copyright (C) 2014-2019, Matt Butcher and Matt Farina
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

36
vendor/github.com/Masterminds/semver/Makefile generated vendored Normal file
View file

@ -0,0 +1,36 @@
.PHONY: setup
setup:
go get -u gopkg.in/alecthomas/gometalinter.v1
gometalinter.v1 --install
.PHONY: test
test: validate lint
@echo "==> Running tests"
go test -v
.PHONY: validate
validate:
@echo "==> Running static validations"
@gometalinter.v1 \
--disable-all \
--enable deadcode \
--severity deadcode:error \
--enable gofmt \
--enable gosimple \
--enable ineffassign \
--enable misspell \
--enable vet \
--tests \
--vendor \
--deadline 60s \
./... || exit_code=1
.PHONY: lint
lint:
@echo "==> Running linters"
@gometalinter.v1 \
--disable-all \
--enable golint \
--vendor \
--deadline 60s \
./... || :

194
vendor/github.com/Masterminds/semver/README.md generated vendored Normal file
View file

@ -0,0 +1,194 @@
# SemVer
The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. Specifically it provides the ability to:
* Parse semantic versions
* Sort semantic versions
* Check if a semantic version fits within a set of constraints
* Optionally work with a `v` prefix
[![Stability:
Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html)
[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver)
If you are looking for a command line tool for version comparisons please see
[vert](https://github.com/Masterminds/vert) which uses this library.
## Parsing Semantic Versions
To parse a semantic version use the `NewVersion` function. For example,
```go
v, err := semver.NewVersion("1.2.3-beta.1+build345")
```
If there is an error the version wasn't parseable. The version object has methods
to get the parts of the version, compare it to other versions, convert the
version back into a string, and get the original string. For more details
please see the [documentation](https://godoc.org/github.com/Masterminds/semver).
## Sorting Semantic Versions
A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/)
package from the standard library. For example,
```go
raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
vs := make([]*semver.Version, len(raw))
for i, r := range raw {
v, err := semver.NewVersion(r)
if err != nil {
t.Errorf("Error parsing version: %s", err)
}
vs[i] = v
}
sort.Sort(semver.Collection(vs))
```
## Checking Version Constraints
Checking a version against version constraints is one of the most featureful
parts of the package.
```go
c, err := semver.NewConstraint(">= 1.2.3")
if err != nil {
// Handle constraint not being parseable.
}
v, err := semver.NewVersion("1.3")
if err != nil {
// Handle version not being parseable.
}
// Check if the version meets the constraints. The variable a will be true.
a := c.Check(v)
```
## Basic Comparisons
There are two elements to the comparisons. First, a comparison string is a list
of comma-separated AND comparisons. These are then separated by || into OR
comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
comparison that's greater than or equal to 1.2 and less than 3.0.0, or is
greater than or equal to 4.2.3.
The basic comparisons are:
* `=`: equal (aliased to no operator)
* `!=`: not equal
* `>`: greater than
* `<`: less than
* `>=`: greater than or equal to
* `<=`: less than or equal to
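
As a non-authoritative sketch of how such a compound constraint behaves (reusing the constraint string from above; the printed output is illustrative):

```go
c, err := semver.NewConstraint(">= 1.2, < 3.0.0 || >= 4.2.3")
if err != nil {
    // Handle constraint not being parseable.
}

for _, raw := range []string{"1.5.0", "3.7.0", "4.2.3"} {
    v, err := semver.NewVersion(raw)
    if err != nil {
        // Handle version not being parseable.
    }
    fmt.Printf("%s: %t\n", raw, c.Check(v))
    // 1.5.0: true, 3.7.0: false, 4.2.3: true
}
```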
## Working With Pre-release Versions
Pre-releases, for those not familiar with them, are used for software releases
prior to stable or generally available releases. Examples of pre-releases include
development, alpha, beta, and release candidate releases. A pre-release may be
a version such as `1.2.3-beta.1` while the stable release would be `1.2.3`. In the
order of precidence, pre-releases come before their associated releases. In this
example `1.2.3-beta.1 < 1.2.3`.
According to the Semantic Version specification pre-releases may not be
API compliant with their release counterpart. It says,
> A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version.
SemVer comparisons without a pre-release comparator will skip pre-release versions.
For example, `>=1.2.3` will skip pre-releases when looking at a list of releases
while `>=1.2.3-0` will evaluate and find pre-releases.
The reason for the `0` as a pre-release version in the example comparison is
because pre-releases can only contain ASCII alphanumerics and hyphens (along with
`.` separators), per the spec. Sorting happens in ASCII sort order, again per the spec. The lowest character is a `0` in ASCII sort order (see an [ASCII Table](http://www.asciitable.com/))
Understanding ASCII sort ordering is important because A-Z comes before a-z. That
means `>=1.2.3-BETA` will match `1.2.3-alpha`. What you might expect from case
sensitivity doesn't apply here. This is due to ASCII sort ordering which is what
the spec specifies.
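
A small sketch of the pre-release behaviour described above (illustrative only, errors elided):

```go
c1, _ := semver.NewConstraint(">=1.2.3")   // skips pre-releases
c2, _ := semver.NewConstraint(">=1.2.3-0") // evaluates pre-releases

pre, _ := semver.NewVersion("1.2.4-beta.1")

fmt.Println(c1.Check(pre)) // false: no pre-release comparator in the constraint
fmt.Println(c2.Check(pre)) // true: 1.2.4-beta.1 is >= 1.2.3-0
```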
## Hyphen Range Comparisons
There are multiple methods to handle ranges and the first is hyphens ranges.
These look like:
* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
## Wildcards In Comparisons
The `x`, `X`, and `*` characters can be used as a wildcard character. This works
for all comparison operators. When used on the `=` operator it falls
back to the patch level comparison (see tilde below). For example,
* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
* `>= 1.2.x` is equivalent to `>= 1.2.0`
* `<= 2.x` is equivalent to `< 3`
* `*` is equivalent to `>= 0.0.0`
## Tilde Range Comparisons (Patch)
The tilde (`~`) comparison operator is for patch level ranges when a minor
version is specified and major level changes when the minor number is missing.
For example,
* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
* `~1` is equivalent to `>= 1, < 2`
* `~2.3` is equivalent to `>= 2.3, < 2.4`
* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
* `~1.x` is equivalent to `>= 1, < 2`
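A brief tilde sketch in the same style (illustrative versions only):
```go
c, _ := semver.NewConstraint("~1.2.3") // equivalent to ">= 1.2.3, < 1.3.0"

fmt.Println(c.Check(semver.MustParse("1.2.9"))) // true: a patch-level change is allowed
fmt.Println(c.Check(semver.MustParse("1.3.0"))) // false: a minor bump falls outside ~1.2.3
```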
## Caret Range Comparisons (Major)
The caret (`^`) comparison operator is for major level changes. This is useful
when comparing API versions, as a major change is API breaking. For example,
* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
* `^0.0.1` is equivalent to `>= 0.0.1, < 1.0.0`
* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
* `^2.3` is equivalent to `>= 2.3, < 3`
* `^2.x` is equivalent to `>= 2.0.0, < 3`
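And a caret sketch under the same assumptions:
```go
c, _ := semver.NewConstraint("^1.2.3") // equivalent to ">= 1.2.3, < 2.0.0"

fmt.Println(c.Check(semver.MustParse("1.9.0"))) // true: same major version
fmt.Println(c.Check(semver.MustParse("2.0.0"))) // false: a major bump is treated as breaking
```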
# Validation
In addition to testing a version against a constraint, a version can be validated
against a constraint. When validation fails a slice of errors containing why a
version didn't meet the constraint is returned. For example,
```go
c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
if err != nil {
// Handle constraint not being parseable.
}
v, err := semver.NewVersion("1.3")
if err != nil {
// Handle version not being parseable.
}
// Validate a version against a constraint.
a, msgs := c.Validate(v)
// a is false
for _, m := range msgs {
fmt.Println(m)
// Loops over the errors which would read
// "1.3 is greater than 1.2.3"
// "1.3 is less than 1.4"
}
```
# Fuzzing
[dvyukov/go-fuzz](https://github.com/dvyukov/go-fuzz) is used for fuzzing.
1. `go-fuzz-build`
2. `go-fuzz -workdir=fuzz`
# Contribute
If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues)
or [create a pull request](https://github.com/Masterminds/semver/pulls).

44
vendor/github.com/Masterminds/semver/appveyor.yml generated vendored Normal file
View file

@ -0,0 +1,44 @@
version: build-{build}.{branch}
clone_folder: C:\gopath\src\github.com\Masterminds\semver
shallow_clone: true
environment:
GOPATH: C:\gopath
platform:
- x64
install:
- go version
- go env
- go get -u gopkg.in/alecthomas/gometalinter.v1
- set PATH=%PATH%;%GOPATH%\bin
- gometalinter.v1.exe --install
build_script:
- go install -v ./...
test_script:
- "gometalinter.v1 \
--disable-all \
--enable deadcode \
--severity deadcode:error \
--enable gofmt \
--enable gosimple \
--enable ineffassign \
--enable misspell \
--enable vet \
--tests \
--vendor \
--deadline 60s \
./... || exit_code=1"
- "gometalinter.v1 \
--disable-all \
--enable golint \
--vendor \
--deadline 60s \
./... || :"
- go test -v
deploy: off

24
vendor/github.com/Masterminds/semver/collection.go generated vendored Normal file
View file

@ -0,0 +1,24 @@
package semver
// Collection is a collection of Version instances and implements the sort
// interface. See the sort package for more details.
// https://golang.org/pkg/sort/
type Collection []*Version
// Len returns the length of a collection. The number of Version instances
// on the slice.
func (c Collection) Len() int {
return len(c)
}
// Less is needed for the sort interface to compare two Version objects on the
// slice. If checks if one is less than the other.
func (c Collection) Less(i, j int) bool {
return c[i].LessThan(c[j])
}
// Swap is needed for the sort interface to replace the Version objects
// at two different positions in the slice.
func (c Collection) Swap(i, j int) {
c[i], c[j] = c[j], c[i]
}

423
vendor/github.com/Masterminds/semver/constraints.go generated vendored Normal file
View file

@ -0,0 +1,423 @@
package semver
import (
"errors"
"fmt"
"regexp"
"strings"
)
// Constraints is one or more constraint that a semantic version can be
// checked against.
type Constraints struct {
constraints [][]*constraint
}
// NewConstraint returns a Constraints instance that a Version instance can
// be checked against. If there is a parse error it will be returned.
func NewConstraint(c string) (*Constraints, error) {
// Rewrite - ranges into a comparison operation.
c = rewriteRange(c)
ors := strings.Split(c, "||")
or := make([][]*constraint, len(ors))
for k, v := range ors {
cs := strings.Split(v, ",")
result := make([]*constraint, len(cs))
for i, s := range cs {
pc, err := parseConstraint(s)
if err != nil {
return nil, err
}
result[i] = pc
}
or[k] = result
}
o := &Constraints{constraints: or}
return o, nil
}
// Check tests if a version satisfies the constraints.
func (cs Constraints) Check(v *Version) bool {
// loop over the ORs and check the inner ANDs
for _, o := range cs.constraints {
joy := true
for _, c := range o {
if !c.check(v) {
joy = false
break
}
}
if joy {
return true
}
}
return false
}
// Validate checks if a version satisfies a constraint. If not a slice of
// reasons for the failure are returned in addition to a bool.
func (cs Constraints) Validate(v *Version) (bool, []error) {
// loop over the ORs and check the inner ANDs
var e []error
// Capture the prerelease message only once. When it happens the first time
// this var is marked
var prerelesase bool
for _, o := range cs.constraints {
joy := true
for _, c := range o {
// Before running the check, handle the case where the version is
// a prerelease and the check is not searching for prereleases.
if c.con.pre == "" && v.pre != "" {
if !prerelesase {
em := fmt.Errorf("%s is a prerelease version and the constraint is only looking for release versions", v)
e = append(e, em)
prerelesase = true
}
joy = false
} else {
if !c.check(v) {
em := fmt.Errorf(c.msg, v, c.orig)
e = append(e, em)
joy = false
}
}
}
if joy {
return true, []error{}
}
}
return false, e
}
var constraintOps map[string]cfunc
var constraintMsg map[string]string
var constraintRegex *regexp.Regexp
func init() {
constraintOps = map[string]cfunc{
"": constraintTildeOrEqual,
"=": constraintTildeOrEqual,
"!=": constraintNotEqual,
">": constraintGreaterThan,
"<": constraintLessThan,
">=": constraintGreaterThanEqual,
"=>": constraintGreaterThanEqual,
"<=": constraintLessThanEqual,
"=<": constraintLessThanEqual,
"~": constraintTilde,
"~>": constraintTilde,
"^": constraintCaret,
}
constraintMsg = map[string]string{
"": "%s is not equal to %s",
"=": "%s is not equal to %s",
"!=": "%s is equal to %s",
">": "%s is less than or equal to %s",
"<": "%s is greater than or equal to %s",
">=": "%s is less than %s",
"=>": "%s is less than %s",
"<=": "%s is greater than %s",
"=<": "%s is greater than %s",
"~": "%s does not have same major and minor version as %s",
"~>": "%s does not have same major and minor version as %s",
"^": "%s does not have same major version as %s",
}
ops := make([]string, 0, len(constraintOps))
for k := range constraintOps {
ops = append(ops, regexp.QuoteMeta(k))
}
constraintRegex = regexp.MustCompile(fmt.Sprintf(
`^\s*(%s)\s*(%s)\s*$`,
strings.Join(ops, "|"),
cvRegex))
constraintRangeRegex = regexp.MustCompile(fmt.Sprintf(
`\s*(%s)\s+-\s+(%s)\s*`,
cvRegex, cvRegex))
}
// An individual constraint
type constraint struct {
// The callback function for the constraint. It performs the logic for
// the constraint.
function cfunc
msg string
// The version used in the constraint check. For example, if a constraint
// is '<= 2.0.0', con is a version instance representing 2.0.0.
con *Version
// The original parsed version (e.g., 4.x from != 4.x)
orig string
// When an x is used as part of the version (e.g., 1.x)
minorDirty bool
dirty bool
patchDirty bool
}
// Check if a version meets the constraint
func (c *constraint) check(v *Version) bool {
return c.function(v, c)
}
type cfunc func(v *Version, c *constraint) bool
func parseConstraint(c string) (*constraint, error) {
m := constraintRegex.FindStringSubmatch(c)
if m == nil {
return nil, fmt.Errorf("improper constraint: %s", c)
}
ver := m[2]
orig := ver
minorDirty := false
patchDirty := false
dirty := false
if isX(m[3]) {
ver = "0.0.0"
dirty = true
} else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" {
minorDirty = true
dirty = true
ver = fmt.Sprintf("%s.0.0%s", m[3], m[6])
} else if isX(strings.TrimPrefix(m[5], ".")) {
dirty = true
patchDirty = true
ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6])
}
con, err := NewVersion(ver)
if err != nil {
// The constraintRegex should catch any regex parsing errors. So,
// we should never get here.
return nil, errors.New("constraint Parser Error")
}
cs := &constraint{
function: constraintOps[m[1]],
msg: constraintMsg[m[1]],
con: con,
orig: orig,
minorDirty: minorDirty,
patchDirty: patchDirty,
dirty: dirty,
}
return cs, nil
}
// Constraint functions
func constraintNotEqual(v *Version, c *constraint) bool {
if c.dirty {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if c.con.Major() != v.Major() {
return true
}
if c.con.Minor() != v.Minor() && !c.minorDirty {
return true
} else if c.minorDirty {
return false
}
return false
}
return !v.Equal(c.con)
}
func constraintGreaterThan(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
return v.Compare(c.con) == 1
}
func constraintLessThan(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if !c.dirty {
return v.Compare(c.con) < 0
}
if v.Major() > c.con.Major() {
return false
} else if v.Minor() > c.con.Minor() && !c.minorDirty {
return false
}
return true
}
func constraintGreaterThanEqual(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
return v.Compare(c.con) >= 0
}
func constraintLessThanEqual(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if !c.dirty {
return v.Compare(c.con) <= 0
}
if v.Major() > c.con.Major() {
return false
} else if v.Minor() > c.con.Minor() && !c.minorDirty {
return false
}
return true
}
// ~*, ~>* --> >= 0.0.0 (any)
// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
func constraintTilde(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if v.LessThan(c.con) {
return false
}
// ~0.0.0 is a special case where all constraints are accepted. It's
// equivalent to >= 0.0.0.
if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 &&
!c.minorDirty && !c.patchDirty {
return true
}
if v.Major() != c.con.Major() {
return false
}
if v.Minor() != c.con.Minor() && !c.minorDirty {
return false
}
return true
}
// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
// it's a straight =
func constraintTildeOrEqual(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if c.dirty {
c.msg = constraintMsg["~"]
return constraintTilde(v, c)
}
return v.Equal(c.con)
}
// ^* --> (any)
// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0
// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0
// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0
// ^1.2.3 --> >=1.2.3, <2.0.0
// ^1.2.0 --> >=1.2.0, <2.0.0
func constraintCaret(v *Version, c *constraint) bool {
// If there is a pre-release on the version but the constraint isn't looking
// for them assume that pre-releases are not compatible. See issue 21 for
// more details.
if v.Prerelease() != "" && c.con.Prerelease() == "" {
return false
}
if v.LessThan(c.con) {
return false
}
if v.Major() != c.con.Major() {
return false
}
return true
}
var constraintRangeRegex *regexp.Regexp
const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
func isX(x string) bool {
switch x {
case "x", "*", "X":
return true
default:
return false
}
}
func rewriteRange(i string) string {
m := constraintRangeRegex.FindAllStringSubmatch(i, -1)
if m == nil {
return i
}
o := i
for _, v := range m {
t := fmt.Sprintf(">= %s, <= %s", v[1], v[11])
o = strings.Replace(o, v[0], t, 1)
}
return o
}

115
vendor/github.com/Masterminds/semver/doc.go generated vendored Normal file
View file

@ -0,0 +1,115 @@
/*
Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go.
Specifically it provides the ability to:
* Parse semantic versions
* Sort semantic versions
* Check if a semantic version fits within a set of constraints
* Optionally work with a `v` prefix
Parsing Semantic Versions
To parse a semantic version use the `NewVersion` function. For example,
v, err := semver.NewVersion("1.2.3-beta.1+build345")
If there is an error the version wasn't parseable. The version object has methods
to get the parts of the version, compare it to other versions, convert the
version back into a string, and get the original string. For more details
please see the documentation at https://godoc.org/github.com/Masterminds/semver.
Sorting Semantic Versions
A set of versions can be sorted using the `sort` package from the standard library.
For example,
raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
vs := make([]*semver.Version, len(raw))
for i, r := range raw {
v, err := semver.NewVersion(r)
if err != nil {
t.Errorf("Error parsing version: %s", err)
}
vs[i] = v
}
sort.Sort(semver.Collection(vs))
Checking Version Constraints
Checking a version against version constraints is one of the most featureful
parts of the package.
c, err := semver.NewConstraint(">= 1.2.3")
if err != nil {
// Handle constraint not being parseable.
}
v, err := semver.NewVersion("1.3")
if err != nil {
// Handle version not being parseable.
}
// Check if the version meets the constraints. The a variable will be true.
a := c.Check(v)
Basic Comparisons
There are two elements to the comparisons. First, a comparison string is a list
of comma-separated AND comparisons. These groups are then separated by || to form
OR comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
greater than or equal to 4.2.3.
The basic comparisons are:
* `=`: equal (aliased to no operator)
* `!=`: not equal
* `>`: greater than
* `<`: less than
* `>=`: greater than or equal to
* `<=`: less than or equal to
Hyphen Range Comparisons
There are multiple methods to handle ranges and the first is hyphen ranges.
These look like:
* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
Wildcards In Comparisons
The `x`, `X`, and `*` characters can be used as a wildcard character. This works
for all comparison operators. When used on the `=` operator it falls
back to the patch level comparison (see tilde below). For example,
* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
* `>= 1.2.x` is equivalent to `>= 1.2.0`
* `<= 2.x` is equivalent to `< 3`
* `*` is equivalent to `>= 0.0.0`
Tilde Range Comparisons (Patch)
The tilde (`~`) comparison operator is for patch level ranges when a minor
version is specified and major level changes when the minor number is missing.
For example,
* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
* `~1` is equivalent to `>= 1, < 2`
* `~2.3` is equivalent to `>= 2.3, < 2.4`
* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
* `~1.x` is equivalent to `>= 1, < 2`
Caret Range Comparisons (Major)
The caret (`^`) comparison operator is for major level changes. This is useful
when comparing API versions, as a major change is API breaking. For example,
* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
* `^2.3` is equivalent to `>= 2.3, < 3`
* `^2.x` is equivalent to `>= 2.0.0, < 3`
*/
package semver

425
vendor/github.com/Masterminds/semver/version.go generated vendored Normal file
View file

@ -0,0 +1,425 @@
package semver
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"regexp"
"strconv"
"strings"
)
// The compiled version of the regex created at init() is cached here so it
// only needs to be created once.
var versionRegex *regexp.Regexp
var validPrereleaseRegex *regexp.Regexp
var (
// ErrInvalidSemVer is returned when a version is found to be invalid while
// being parsed.
ErrInvalidSemVer = errors.New("Invalid Semantic Version")
// ErrInvalidMetadata is returned when the metadata is an invalid format
ErrInvalidMetadata = errors.New("Invalid Metadata string")
// ErrInvalidPrerelease is returned when the pre-release is an invalid format
ErrInvalidPrerelease = errors.New("Invalid Prerelease string")
)
// SemVerRegex is the regular expression used to parse a semantic version.
const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
`(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
`(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
// ValidPrerelease is the regular expression which validates
// both prerelease and metadata values.
const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)$`
// Version represents a single semantic version.
type Version struct {
major, minor, patch int64
pre string
metadata string
original string
}
func init() {
versionRegex = regexp.MustCompile("^" + SemVerRegex + "$")
validPrereleaseRegex = regexp.MustCompile(ValidPrerelease)
}
// NewVersion parses a given version and returns an instance of Version or
// an error if unable to parse the version.
func NewVersion(v string) (*Version, error) {
m := versionRegex.FindStringSubmatch(v)
if m == nil {
return nil, ErrInvalidSemVer
}
sv := &Version{
metadata: m[8],
pre: m[5],
original: v,
}
var temp int64
temp, err := strconv.ParseInt(m[1], 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
}
sv.major = temp
if m[2] != "" {
temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
}
sv.minor = temp
} else {
sv.minor = 0
}
if m[3] != "" {
temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64)
if err != nil {
return nil, fmt.Errorf("Error parsing version segment: %s", err)
}
sv.patch = temp
} else {
sv.patch = 0
}
return sv, nil
}
// MustParse parses a given version and panics on error.
func MustParse(v string) *Version {
sv, err := NewVersion(v)
if err != nil {
panic(err)
}
return sv
}
// String converts a Version object to a string.
// Note, if the original version contained a leading v this version will not.
// See the Original() method to retrieve the original value. Semantic Versions
// don't contain a leading v per the spec. Instead it's optional on
// implementation.
func (v *Version) String() string {
var buf bytes.Buffer
fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch)
if v.pre != "" {
fmt.Fprintf(&buf, "-%s", v.pre)
}
if v.metadata != "" {
fmt.Fprintf(&buf, "+%s", v.metadata)
}
return buf.String()
}
// Original returns the original value passed in to be parsed.
func (v *Version) Original() string {
return v.original
}
// Major returns the major version.
func (v *Version) Major() int64 {
return v.major
}
// Minor returns the minor version.
func (v *Version) Minor() int64 {
return v.minor
}
// Patch returns the patch version.
func (v *Version) Patch() int64 {
return v.patch
}
// Prerelease returns the pre-release version.
func (v *Version) Prerelease() string {
return v.pre
}
// Metadata returns the metadata on the version.
func (v *Version) Metadata() string {
return v.metadata
}
// originalVPrefix returns the original 'v' prefix if any.
func (v *Version) originalVPrefix() string {
// Note, only lowercase v is supported as a prefix by the parser.
if v.original != "" && v.original[:1] == "v" {
return v.original[:1]
}
return ""
}
// IncPatch produces the next patch version.
// If the current version does not have prerelease/metadata information,
// it unsets metadata and prerelease values, increments patch number.
// If the current version has any of prerelease or metadata information,
// it unsets both values and keeps the current patch value
func (v Version) IncPatch() Version {
vNext := v
// according to http://semver.org/#spec-item-9
// Pre-release versions have a lower precedence than the associated normal version.
// according to http://semver.org/#spec-item-10
// Build metadata SHOULD be ignored when determining version precedence.
if v.pre != "" {
vNext.metadata = ""
vNext.pre = ""
} else {
vNext.metadata = ""
vNext.pre = ""
vNext.patch = v.patch + 1
}
vNext.original = v.originalVPrefix() + "" + vNext.String()
return vNext
}
// IncMinor produces the next minor version.
// Sets patch to 0.
// Increments minor number.
// Unsets metadata.
// Unsets prerelease status.
func (v Version) IncMinor() Version {
vNext := v
vNext.metadata = ""
vNext.pre = ""
vNext.patch = 0
vNext.minor = v.minor + 1
vNext.original = v.originalVPrefix() + "" + vNext.String()
return vNext
}
// IncMajor produces the next major version.
// Sets patch to 0.
// Sets minor to 0.
// Increments major number.
// Unsets metadata.
// Unsets prerelease status.
func (v Version) IncMajor() Version {
vNext := v
vNext.metadata = ""
vNext.pre = ""
vNext.patch = 0
vNext.minor = 0
vNext.major = v.major + 1
vNext.original = v.originalVPrefix() + "" + vNext.String()
return vNext
}
// SetPrerelease defines the prerelease value.
// Value must not include the required 'hyphen' prefix.
func (v Version) SetPrerelease(prerelease string) (Version, error) {
vNext := v
if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) {
return vNext, ErrInvalidPrerelease
}
vNext.pre = prerelease
vNext.original = v.originalVPrefix() + "" + vNext.String()
return vNext, nil
}
// SetMetadata defines metadata value.
// Value must not include the required 'plus' prefix.
func (v Version) SetMetadata(metadata string) (Version, error) {
vNext := v
if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) {
return vNext, ErrInvalidMetadata
}
vNext.metadata = metadata
vNext.original = v.originalVPrefix() + "" + vNext.String()
return vNext, nil
}
// LessThan tests if one version is less than another one.
func (v *Version) LessThan(o *Version) bool {
return v.Compare(o) < 0
}
// GreaterThan tests if one version is greater than another one.
func (v *Version) GreaterThan(o *Version) bool {
return v.Compare(o) > 0
}
// Equal tests if two versions are equal to each other.
// Note, versions can be equal with different metadata since metadata
// is not considered part of the comparable version.
func (v *Version) Equal(o *Version) bool {
return v.Compare(o) == 0
}
// Compare compares this version to another one. It returns -1, 0, or 1 if
// the version is smaller, equal, or larger than the other version.
//
// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is
// lower than the version without a prerelease.
func (v *Version) Compare(o *Version) int {
// Compare the major, minor, and patch version for differences. If a
// difference is found return the comparison.
if d := compareSegment(v.Major(), o.Major()); d != 0 {
return d
}
if d := compareSegment(v.Minor(), o.Minor()); d != 0 {
return d
}
if d := compareSegment(v.Patch(), o.Patch()); d != 0 {
return d
}
// At this point the major, minor, and patch versions are the same.
ps := v.pre
po := o.Prerelease()
if ps == "" && po == "" {
return 0
}
if ps == "" {
return 1
}
if po == "" {
return -1
}
return comparePrerelease(ps, po)
}
// UnmarshalJSON implements JSON.Unmarshaler interface.
func (v *Version) UnmarshalJSON(b []byte) error {
var s string
if err := json.Unmarshal(b, &s); err != nil {
return err
}
temp, err := NewVersion(s)
if err != nil {
return err
}
v.major = temp.major
v.minor = temp.minor
v.patch = temp.patch
v.pre = temp.pre
v.metadata = temp.metadata
v.original = temp.original
temp = nil
return nil
}
// MarshalJSON implements JSON.Marshaler interface.
func (v *Version) MarshalJSON() ([]byte, error) {
return json.Marshal(v.String())
}
func compareSegment(v, o int64) int {
if v < o {
return -1
}
if v > o {
return 1
}
return 0
}
func comparePrerelease(v, o string) int {
// split the prerelease versions by their parts. The separator, per the spec,
// is a .
sparts := strings.Split(v, ".")
oparts := strings.Split(o, ".")
// Find the longer length of the parts to know how many loop iterations to
// go through.
slen := len(sparts)
olen := len(oparts)
l := slen
if olen > slen {
l = olen
}
// Iterate over each part of the prereleases to compare the differences.
for i := 0; i < l; i++ {
// Since the length of the parts can be different we need to create
// a placeholder. This is to avoid out of bounds issues.
stemp := ""
if i < slen {
stemp = sparts[i]
}
otemp := ""
if i < olen {
otemp = oparts[i]
}
d := comparePrePart(stemp, otemp)
if d != 0 {
return d
}
}
// Reaching here means two versions are of equal value but have different
// metadata (the part following a +). They are not identical in string form
// but the version comparison finds them to be equal.
return 0
}
func comparePrePart(s, o string) int {
// Fastpath if they are equal
if s == o {
return 0
}
// When s or o are empty we can use the other in an attempt to determine
// the response.
if s == "" {
if o != "" {
return -1
}
return 1
}
if o == "" {
if s != "" {
return 1
}
return -1
}
// When comparing strings "99" is greater than "103". To handle
// cases like this we need to detect numbers and compare them. According
// to the semver spec, numbers are always positive. If there is a - at the
// start like -99 this is to be evaluated as an alphanum. numbers always
// have precedence over alphanum. Parsing as Uints because negative numbers
// are ignored.
oi, n1 := strconv.ParseUint(o, 10, 64)
si, n2 := strconv.ParseUint(s, 10, 64)
// The case where both are strings compare the strings
if n1 != nil && n2 != nil {
if s > o {
return 1
}
return -1
} else if n1 != nil {
// o is a string and s is a number
return -1
} else if n2 != nil {
// s is a string and o is a number
return 1
}
// Both are numbers
if si > oi {
return 1
}
return -1
}

10
vendor/github.com/Masterminds/semver/version_fuzz.go generated vendored Normal file
View file

@ -0,0 +1,10 @@
// +build gofuzz
package semver
func Fuzz(data []byte) int {
if _, err := NewVersion(string(data)); err != nil {
return 0
}
return 1
}

14
vendor/github.com/OpenPeeDeeP/depguard/.gitignore generated vendored Normal file
View file

@ -0,0 +1,14 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
.idea

674
vendor/github.com/OpenPeeDeeP/depguard/LICENSE generated vendored Normal file
View file

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.

77
vendor/github.com/OpenPeeDeeP/depguard/README.md generated vendored Normal file
View file

@ -0,0 +1,77 @@
# Depguard
Go linter that checks package imports are in a list of acceptable packages. It
supports whitelist and blacklist options and can do prefix or glob matching.
This allows you to permit imports from a whole organization or only specific
packages within a repository. Prefix matching is recommended, as it is faster
than glob matching; the fewer glob patterns, the better.
> If a pattern is matched by prefix it does not try to match via glob.
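As a rough illustration of the two modes, here is a small sketch (the package names are made up); a glob entry is compiled with `/` as the separator, just as depguard does internally:
```go
package main

import (
	"fmt"
	"strings"

	"github.com/gobwas/glob"
)

func main() {
	// Prefix entry: matches the organization and everything below it.
	fmt.Println(strings.HasPrefix("github.com/OpenPeeDeeP/depguard", "github.com/OpenPeeDeeP")) // true

	// Glob entry: with '/' as the separator, '*' stops at path segments.
	g := glob.MustCompile("github.com/*/depguard", '/')
	fmt.Println(g.Match("github.com/OpenPeeDeeP/depguard")) // true
	fmt.Println(g.Match("github.com/OpenPeeDeeP/other"))    // false
}
```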
## Install
```bash
go get -u github.com/OpenPeeDeeP/depguard
```
## Config
By default, Depguard looks for a file named `.depguard.json` in the current
working directory. If it is somewhere else, pass in the `-c` flag with
the location of your configuration file.
The following is an example configuration file.
```json
{
"type": "whitelist",
"packages": ["github.com/OpenPeeDeeP/depguard"],
"packageErrorMessages": {
"github.com/OpenPeeDeeP/depguards": "Please use \"github.com/OpenPeeDeeP/depguard\","
},
"inTests": ["github.com/stretchr/testify"],
"includeGoStdLib": true
}
```
- `type` can be either `whitelist` or `blacklist`. This check is case insensitive.
If not specified the default is `blacklist`.
- `packages` is a list of packages for the list type specified.
- `packageErrorMessages` is a mapping from packages to the error message to display
- `inTests` is a list of packages allowed/disallowed only in test files.
- Set `includeGoStdLib` (`includeGoRoot` for backwards compatibility) to true if you want to check the list against the standard library.
If not specified the default is false.
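The same settings can also be driven from Go code. The following is only a minimal sketch, assuming a hypothetical target package `github.com/your/project`, built on the `Depguard` type and the `golang.org/x/tools/go/loader` package this copy depends on:
```go
package main

import (
	"fmt"
	"log"

	"github.com/OpenPeeDeeP/depguard"
	"golang.org/x/tools/go/loader"
)

func main() {
	// Programmatic equivalent of the whitelist example above.
	dg := &depguard.Depguard{
		ListType: depguard.LTWhitelist,
		Packages: []string{"github.com/OpenPeeDeeP/depguard"},
	}

	conf := &loader.Config{}
	conf.Import("github.com/your/project") // hypothetical package to analyze
	prog, err := conf.Load()
	if err != nil {
		log.Fatal(err)
	}

	issues, err := dg.Run(conf, prog)
	if err != nil {
		log.Fatal(err)
	}

	for _, issue := range issues {
		fmt.Printf("%s: import of %s is not allowed here\n", issue.Position, issue.PackageName)
	}
}
```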
## Gometalinter
The binary installation of this linter can be used with
[Gometalinter](github.com/alecthomas/gometalinter).
If you use a configuration file for Gometalinter then the following will need to
be added to your configuration file.
```json
{
"linters": {
"depguard": {
"command": "depguard -c path/to/config.json",
"pattern": "PATH:LINE:COL:MESSAGE",
"installFrom": "github.com/OpenPeeDeeP/depguard",
"isFast": true,
"partitionStrategy": "packages"
}
}
}
```
If you prefer the command line way the following will work for you as well.
```bash
gometalinter --linter='depguard:depguard -c path/to/config.json:PATH:LINE:COL:MESSAGE'
```
## Golangci-lint
This linter was built with
[Golangci-lint](https://github.com/golangci/golangci-lint) in mind. It is compatible
with it; read their docs to see how to enable all of their linters, including this one.

241
vendor/github.com/OpenPeeDeeP/depguard/depguard.go generated vendored Normal file
View file

@ -0,0 +1,241 @@
package depguard
import (
"go/build"
"go/token"
"io/ioutil"
"path"
"sort"
"strings"
"github.com/gobwas/glob"
"golang.org/x/tools/go/loader"
)
// ListType states what kind of list is passed in.
type ListType int
const (
// LTBlacklist states the list given is a blacklist. (default)
LTBlacklist ListType = iota
// LTWhitelist states the list given is a whitelist.
LTWhitelist
)
// StringToListType makes it easier to turn a string into a ListType.
// It assumes that the string representation is lower case.
var StringToListType = map[string]ListType{
"whitelist": LTWhitelist,
"blacklist": LTBlacklist,
}
// Issue with the package with PackageName at the Position.
type Issue struct {
PackageName string
Position token.Position
}
// Depguard checks imports to make sure they follow the given list and constraints.
type Depguard struct {
ListType ListType
IncludeGoRoot bool
Packages []string
prefixPackages []string
globPackages []glob.Glob
TestPackages []string
prefixTestPackages []string
globTestPackages []glob.Glob
prefixRoot []string
}
// Run checks for dependencies given the program and validates them against
// Packages.
func (dg *Depguard) Run(config *loader.Config, prog *loader.Program) ([]*Issue, error) {
// Shortcut execution on an empty blacklist as that means every package is allowed
if dg.ListType == LTBlacklist && len(dg.Packages) == 0 {
return nil, nil
}
if err := dg.initialize(config, prog); err != nil {
return nil, err
}
directImports, err := dg.createImportMap(prog)
if err != nil {
return nil, err
}
var issues []*Issue
for pkg, positions := range directImports {
for _, pos := range positions {
prefixList, globList := dg.prefixPackages, dg.globPackages
if len(dg.TestPackages) > 0 && strings.Index(pos.Filename, "_test.go") != -1 {
prefixList, globList = dg.prefixTestPackages, dg.globTestPackages
}
if dg.flagIt(pkg, prefixList, globList) {
issues = append(issues, &Issue{
PackageName: pkg,
Position: pos,
})
}
}
}
return issues, nil
}
func (dg *Depguard) initialize(config *loader.Config, prog *loader.Program) error {
// parse ordinary guarded packages
for _, pkg := range dg.Packages {
if strings.ContainsAny(pkg, "!?*[]{}") {
g, err := glob.Compile(pkg, '/')
if err != nil {
return err
}
dg.globPackages = append(dg.globPackages, g)
} else {
dg.prefixPackages = append(dg.prefixPackages, pkg)
}
}
// Sort the packages so we can have a faster search in the array
sort.Strings(dg.prefixPackages)
// parse guarded tests packages
for _, pkg := range dg.TestPackages {
if strings.ContainsAny(pkg, "!?*[]{}") {
g, err := glob.Compile(pkg, '/')
if err != nil {
return err
}
dg.globTestPackages = append(dg.globTestPackages, g)
} else {
dg.prefixTestPackages = append(dg.prefixTestPackages, pkg)
}
}
// Sort the test packages so we can have a faster search in the array
sort.Strings(dg.prefixTestPackages)
if !dg.IncludeGoRoot {
var err error
dg.prefixRoot, err = listRootPrefixs(config.Build)
if err != nil {
return err
}
}
return nil
}
func (dg *Depguard) createImportMap(prog *loader.Program) (map[string][]token.Position, error) {
importMap := make(map[string][]token.Position)
// For the directly imported packages
for _, imported := range prog.InitialPackages() {
// Go through their files
for _, file := range imported.Files {
// And populate a map of all direct imports and their positions
// This will filter out GoRoot depending on the Depguard.IncludeGoRoot
for _, fileImport := range file.Imports {
fileImportPath := cleanBasicLitString(fileImport.Path.Value)
if !dg.IncludeGoRoot && dg.isRoot(fileImportPath) {
continue
}
position := prog.Fset.Position(fileImport.Pos())
positions, found := importMap[fileImportPath]
if !found {
importMap[fileImportPath] = []token.Position{
position,
}
continue
}
importMap[fileImportPath] = append(positions, position)
}
}
}
return importMap, nil
}
func pkgInList(pkg string, prefixList []string, globList []glob.Glob) bool {
if pkgInPrefixList(pkg, prefixList) {
return true
}
return pkgInGlobList(pkg, globList)
}
func pkgInPrefixList(pkg string, prefixList []string) bool {
// Idx represents where in the package slice the passed in package would go
// when sorted. -1 Just means that it would be at the very front of the slice.
idx := sort.Search(len(prefixList), func(i int) bool {
return prefixList[i] > pkg
}) - 1
// This means that the package passed in has no way to be prefixed by anything
// in the package list as it is already smaller then everything
if idx == -1 {
return false
}
return strings.HasPrefix(pkg, prefixList[idx])
}
func pkgInGlobList(pkg string, globList []glob.Glob) bool {
for _, g := range globList {
if g.Match(pkg) {
return true
}
}
return false
}
// InList | WhiteList | BlackList
// y | | x
// n | x |
func (dg *Depguard) flagIt(pkg string, prefixList []string, globList []glob.Glob) bool {
return pkgInList(pkg, prefixList, globList) == (dg.ListType == LTBlacklist)
}
func cleanBasicLitString(value string) string {
return strings.Trim(value, "\"\\")
}
// We can do this as all imports that are not root are either prefixed with a domain
// or prefixed with `./` or `/` to dictate it is a local file reference
func listRootPrefixs(buildCtx *build.Context) ([]string, error) {
if buildCtx == nil {
buildCtx = &build.Default
}
root := path.Join(buildCtx.GOROOT, "src")
fs, err := ioutil.ReadDir(root)
if err != nil {
return nil, err
}
var pkgPrefix []string
for _, f := range fs {
if !f.IsDir() {
continue
}
pkgPrefix = append(pkgPrefix, f.Name())
}
return pkgPrefix, nil
}
func (dg *Depguard) isRoot(importPath string) bool {
// Idx represents where in the package slice the passed in package would go
// when sorted. -1 Just means that it would be at the very front of the slice.
idx := sort.Search(len(dg.prefixRoot), func(i int) bool {
return dg.prefixRoot[i] > importPath
}) - 1
// This means that the package passed in has no way to be prefixed by anything
// in the package list as it is already smaller then everything
if idx == -1 {
return false
}
// if it is prefixed by a root prefix we need to check if it is an exact match
// or prefix with `/` as this could return false posative if the domain was
// `archive.com` for example as `archive` is a go root package.
if strings.HasPrefix(importPath, dg.prefixRoot[idx]) {
return strings.HasPrefix(importPath, dg.prefixRoot[idx]+"/") || importPath == dg.prefixRoot[idx]
}
return false
}

9
vendor/github.com/OpenPeeDeeP/depguard/go.mod generated vendored Normal file
View file

@ -0,0 +1,9 @@
module github.com/OpenPeeDeeP/depguard
go 1.13
require (
github.com/gobwas/glob v0.2.3
github.com/kisielk/gotool v1.0.0
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b
)

6
vendor/github.com/OpenPeeDeeP/depguard/go.sum generated vendored Normal file
View file

@ -0,0 +1,6 @@
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b h1:7tibmaEqrQYA+q6ri7NQjuxqSwechjtDHKq6/e85S38=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=

21
vendor/github.com/alexkohler/prealloc/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Alex Kohler
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

267
vendor/github.com/alexkohler/prealloc/pkg/prealloc.go generated vendored Normal file
View file

@ -0,0 +1,267 @@
package pkg
import (
"fmt"
"go/ast"
"go/token"
)
type sliceDeclaration struct {
name string
// sType string
genD *ast.GenDecl
}
type returnsVisitor struct {
// flags
simple bool
includeRangeLoops bool
includeForLoops bool
// visitor fields
sliceDeclarations []*sliceDeclaration
preallocHints []Hint
returnsInsideOfLoop bool
arrayTypes []string
}
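// Check walks each file and returns a hint for every slice that is declared
// with var and then grown with append inside a range or for loop (as enabled
// by includeRangeLoops and includeForLoops). With simple set, hints are
// suppressed when returns or branch statements were seen inside the loops.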
func Check(files []*ast.File, simple, includeRangeLoops, includeForLoops bool) []Hint {
hints := []Hint{}
for _, f := range files {
retVis := &returnsVisitor{
simple: simple,
includeRangeLoops: includeRangeLoops,
includeForLoops: includeForLoops,
}
ast.Walk(retVis, f)
// if simple is true, then we actually have to check if we had returns
// inside of our loop. Otherwise, we can just report all messages.
if !retVis.simple || !retVis.returnsInsideOfLoop {
hints = append(hints, retVis.preallocHints...)
}
}
return hints
}
func contains(slice []string, item string) bool {
for _, s := range slice {
if s == item {
return true
}
}
return false
}
func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
v.sliceDeclarations = nil
v.returnsInsideOfLoop = false
switch n := node.(type) {
case *ast.TypeSpec:
if _, ok := n.Type.(*ast.ArrayType); ok {
if n.Name != nil {
v.arrayTypes = append(v.arrayTypes, n.Name.Name)
}
}
case *ast.FuncDecl:
if n.Body != nil {
for _, stmt := range n.Body.List {
switch s := stmt.(type) {
// Find non pre-allocated slices
case *ast.DeclStmt:
genD, ok := s.Decl.(*ast.GenDecl)
if !ok {
continue
}
if genD.Tok == token.TYPE {
for _, spec := range genD.Specs {
tSpec, ok := spec.(*ast.TypeSpec)
if !ok {
continue
}
if _, ok := tSpec.Type.(*ast.ArrayType); ok {
if tSpec.Name != nil {
v.arrayTypes = append(v.arrayTypes, tSpec.Name.Name)
}
}
}
} else if genD.Tok == token.VAR {
for _, spec := range genD.Specs {
vSpec, ok := spec.(*ast.ValueSpec)
if !ok {
continue
}
var isArrType bool
switch val := vSpec.Type.(type) {
case *ast.ArrayType:
isArrType = true
case *ast.Ident:
isArrType = contains(v.arrayTypes, val.Name)
}
if isArrType {
if vSpec.Names != nil {
/*atID, ok := arrayType.Elt.(*ast.Ident)
if !ok {
continue
}*/
// We should handle multiple slices declared on same line e.g. var mySlice1, mySlice2 []uint32
for _, vName := range vSpec.Names {
v.sliceDeclarations = append(v.sliceDeclarations, &sliceDeclaration{name: vName.Name /*sType: atID.Name,*/, genD: genD})
}
}
}
}
}
case *ast.RangeStmt:
if v.includeRangeLoops {
if len(v.sliceDeclarations) == 0 {
continue
}
// Check the value being ranged over and ensure it's not a channel (we cannot offer any recommendations on channel ranges).
rangeIdent, ok := s.X.(*ast.Ident)
if ok && rangeIdent.Obj != nil {
valueSpec, ok := rangeIdent.Obj.Decl.(*ast.ValueSpec)
if ok {
if _, rangeTargetIsChannel := valueSpec.Type.(*ast.ChanType); rangeTargetIsChannel {
continue
}
}
}
if s.Body != nil {
v.handleLoops(s.Body)
}
}
case *ast.ForStmt:
if v.includeForLoops {
if len(v.sliceDeclarations) == 0 {
continue
}
if s.Body != nil {
v.handleLoops(s.Body)
}
}
default:
}
}
}
}
return v
}
// handleLoops is a helper function to share the logic required for both *ast.RangeLoops and *ast.ForLoops
func (v *returnsVisitor) handleLoops(blockStmt *ast.BlockStmt) {
for _, stmt := range blockStmt.List {
switch bodyStmt := stmt.(type) {
case *ast.AssignStmt:
asgnStmt := bodyStmt
for index, expr := range asgnStmt.Rhs {
if index >= len(asgnStmt.Lhs) {
continue
}
lhsIdent, ok := asgnStmt.Lhs[index].(*ast.Ident)
if !ok {
continue
}
callExpr, ok := expr.(*ast.CallExpr)
if !ok {
continue
}
rhsFuncIdent, ok := callExpr.Fun.(*ast.Ident)
if !ok {
continue
}
if rhsFuncIdent.Name != "append" {
continue
}
// e.g., `x = append(x)`
// Pointless, but pre-allocation will not help.
if len(callExpr.Args) < 2 {
continue
}
rhsIdent, ok := callExpr.Args[0].(*ast.Ident)
if !ok {
continue
}
// e.g., `x = append(y, a)`
// This is weird (and maybe a logic error),
// but we cannot recommend pre-allocation.
if lhsIdent.Name != rhsIdent.Name {
continue
}
// e.g., `x = append(x, y...)`
// we should ignore this. Pre-allocating in this case
// is confusing, and is not possible in general.
if callExpr.Ellipsis.IsValid() {
continue
}
for _, sliceDecl := range v.sliceDeclarations {
if sliceDecl.name == lhsIdent.Name {
// This is a potential mark, we just need to make sure there are no returns/continues in the
// range loop.
// now we just need to grab whatever we're ranging over
/*sxIdent, ok := s.X.(*ast.Ident)
if !ok {
continue
}*/
v.preallocHints = append(v.preallocHints, Hint{
Pos: sliceDecl.genD.Pos(),
DeclaredSliceName: sliceDecl.name,
})
}
}
}
case *ast.IfStmt:
ifStmt := bodyStmt
if ifStmt.Body != nil {
for _, ifBodyStmt := range ifStmt.Body.List {
// TODO should probably handle embedded ifs here
switch /*ift :=*/ ifBodyStmt.(type) {
case *ast.BranchStmt, *ast.ReturnStmt:
v.returnsInsideOfLoop = true
default:
}
}
}
default:
}
}
}
// Hint stores the information about an occurrence of a slice that could be
// preallocated.
type Hint struct {
Pos token.Pos
DeclaredSliceName string
}
func (h Hint) String() string {
return fmt.Sprintf("%v: Consider preallocating %v", h.Pos, h.DeclaredSliceName)
}
func (h Hint) StringFromFS(f *token.FileSet) string {
file := f.File(h.Pos)
lineNumber := file.Position(h.Pos).Line
return fmt.Sprintf("%v:%v Consider preallocating %v", file.Name(), lineNumber, h.DeclaredSliceName)
}

13
vendor/github.com/ashanbrown/forbidigo/LICENSE generated vendored Normal file
View file

@ -0,0 +1,13 @@
Copyright 2019 Andrew Shannon Brown
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,45 @@
package forbidigo
// Code generated by github.com/launchdarkly/go-options. DO NOT EDIT.
type ApplyOptionFunc func(c *config) error
func (f ApplyOptionFunc) apply(c *config) error {
return f(c)
}
func newConfig(options ...Option) (config, error) {
var c config
err := applyConfigOptions(&c, options...)
return c, err
}
func applyConfigOptions(c *config, options ...Option) error {
c.ExcludeGodocExamples = true
for _, o := range options {
if err := o.apply(c); err != nil {
return err
}
}
return nil
}
type Option interface {
apply(*config) error
}
// OptionExcludeGodocExamples don't check inside Godoc examples (see https://blog.golang.org/examples)
func OptionExcludeGodocExamples(o bool) ApplyOptionFunc {
return func(c *config) error {
c.ExcludeGodocExamples = o
return nil
}
}
// OptionIgnorePermitDirectives don't check for `permit` directives(for example, in favor of `nolint`)
func OptionIgnorePermitDirectives(o bool) ApplyOptionFunc {
return func(c *config) error {
c.IgnorePermitDirectives = o
return nil
}
}

View file

@ -0,0 +1,193 @@
// forbidigo provides a linter for forbidding the use of specific identifiers
package forbidigo
import (
"bytes"
"fmt"
"go/ast"
"go/printer"
"go/token"
"log"
"regexp"
"strings"
"github.com/pkg/errors"
)
type Issue interface {
Details() string
Position() token.Position
String() string
}
type UsedIssue struct {
identifier string
pattern string
position token.Position
}
func (a UsedIssue) Details() string {
return fmt.Sprintf("use of `%s` forbidden by pattern `%s`", a.identifier, a.pattern)
}
func (a UsedIssue) Position() token.Position {
return a.position
}
func (a UsedIssue) String() string { return toString(a) }
func toString(i Issue) string {
return fmt.Sprintf("%s at %s", i.Details(), i.Position())
}
type Linter struct {
cfg config
patterns []*regexp.Regexp
}
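// DefaultPatterns forbids fmt.Print, fmt.Printf and fmt.Println as well as
// the builtin print and println functions.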
func DefaultPatterns() []string {
return []string{`^(fmt\.Print(|f|ln)|print|println)$`}
}
//go:generate go-options config
type config struct {
// don't check inside Godoc examples (see https://blog.golang.org/examples)
ExcludeGodocExamples bool `options:",true"`
IgnorePermitDirectives bool // don't check for `permit` directives(for example, in favor of `nolint`)
}
func NewLinter(patterns []string, options ...Option) (*Linter, error) {
cfg, err := newConfig(options...)
if err != nil {
return nil, errors.Wrapf(err, "failed to process options")
}
if len(patterns) == 0 {
patterns = DefaultPatterns()
}
compiledPatterns := make([]*regexp.Regexp, 0, len(patterns))
for _, p := range patterns {
re, err := regexp.Compile(p)
if err != nil {
return nil, fmt.Errorf("unable to compile pattern `%s`: %s", p, err)
}
compiledPatterns = append(compiledPatterns, re)
}
return &Linter{
cfg: cfg,
patterns: compiledPatterns,
}, nil
}
type visitor struct {
cfg config
isTestFile bool // godoc only runs on test files
linter *Linter
comments []*ast.CommentGroup
fset *token.FileSet
issues []Issue
}
func (l *Linter) Run(fset *token.FileSet, nodes ...ast.Node) ([]Issue, error) {
var issues []Issue //nolint:prealloc // we don't know how many there will be
for _, node := range nodes {
var comments []*ast.CommentGroup
isTestFile := false
isWholeFileExample := false
if file, ok := node.(*ast.File); ok {
comments = file.Comments
fileName := fset.Position(file.Pos()).Filename
isTestFile = strings.HasSuffix(fileName, "_test.go")
// From https://blog.golang.org/examples, a "whole file example" is:
// a file that ends in _test.go and contains exactly one example function,
// no test or benchmark functions, and at least one other package-level declaration.
if l.cfg.ExcludeGodocExamples && isTestFile && len(file.Decls) > 1 {
numExamples := 0
numTestsAndBenchmarks := 0
for _, decl := range file.Decls {
funcDecl, isFuncDecl := decl.(*ast.FuncDecl)
// consider only functions, not methods
if !isFuncDecl || funcDecl.Recv != nil || funcDecl.Name == nil {
continue
}
funcName := funcDecl.Name.Name
if strings.HasPrefix(funcName, "Test") || strings.HasPrefix(funcName, "Benchmark") {
numTestsAndBenchmarks++
break // not a whole file example
}
if strings.HasPrefix(funcName, "Example") {
numExamples++
}
}
// if this is a whole file example, skip this node
isWholeFileExample = numExamples == 1 && numTestsAndBenchmarks == 0
}
}
if isWholeFileExample {
continue
}
visitor := visitor{
cfg: l.cfg,
isTestFile: isTestFile,
linter: l,
fset: fset,
comments: comments,
}
ast.Walk(&visitor, node)
issues = append(issues, visitor.issues...)
}
return issues, nil
}
func (v *visitor) Visit(node ast.Node) ast.Visitor {
switch node := node.(type) {
case *ast.FuncDecl:
// don't descend into godoc examples if we are ignoring them
isGodocExample := v.isTestFile && node.Recv == nil && node.Name != nil && strings.HasPrefix(node.Name.Name, "Example")
if isGodocExample && v.cfg.ExcludeGodocExamples {
return nil
}
return v
case *ast.SelectorExpr:
case *ast.Ident:
default:
return v
}
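// Only selector expressions and plain identifiers reach this point; their
// printed form is checked against every forbidden pattern.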
for _, p := range v.linter.patterns {
if p.MatchString(v.textFor(node)) && !v.permit(node) {
v.issues = append(v.issues, UsedIssue{
identifier: v.textFor(node),
pattern: p.String(),
position: v.fset.Position(node.Pos()),
})
}
}
return nil
}
func (v *visitor) textFor(node ast.Node) string {
buf := new(bytes.Buffer)
if err := printer.Fprint(buf, v.fset, node); err != nil {
log.Fatalf("ERROR: unable to print node at %s: %s", v.fset.Position(node.Pos()), err)
}
return buf.String()
}
func (v *visitor) permit(node ast.Node) bool {
if v.cfg.IgnorePermitDirectives {
return false
}
nodePos := v.fset.Position(node.Pos())
var nolint = regexp.MustCompile(fmt.Sprintf(`^//\s?permit:%s\b`, regexp.QuoteMeta(v.textFor(node))))
for _, c := range v.comments {
commentPos := v.fset.Position(c.Pos())
if commentPos.Line == nodePos.Line && len(c.List) > 0 && nolint.MatchString(c.List[0].Text) {
return true
}
}
return false
}

13
vendor/github.com/ashanbrown/makezero/LICENSE generated vendored Normal file
View file

@ -0,0 +1,13 @@
Copyright 2019 Andrew Shannon Brown
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

View file

@ -0,0 +1,200 @@
// makezero provides a linter for appends to slices initialized with non-zero length.
package makezero
import (
"bytes"
"fmt"
"go/ast"
"go/printer"
"go/token"
"go/types"
"log"
"regexp"
)
type Issue interface {
Details() string
Position() token.Position
String() string
}
type AppendIssue struct {
name string
position token.Position
}
func (a AppendIssue) Details() string {
return fmt.Sprintf("append to slice `%s` with non-zero initialized length", a.name)
}
func (a AppendIssue) Position() token.Position {
return a.position
}
func (a AppendIssue) String() string { return toString(a) }
type MustHaveNonZeroInitLenIssue struct {
name string
position token.Position
}
func (i MustHaveNonZeroInitLenIssue) Details() string {
return fmt.Sprintf("slice `%s` does not have non-zero initial length", i.name)
}
func (i MustHaveNonZeroInitLenIssue) Position() token.Position {
return i.position
}
func (i MustHaveNonZeroInitLenIssue) String() string { return toString(i) }
func toString(i Issue) string {
return fmt.Sprintf("%s at %s", i.Details(), i.Position())
}
type visitor struct {
initLenMustBeZero bool
comments []*ast.CommentGroup // comments to apply during this visit
info *types.Info
nonZeroLengthSliceDecls map[interface{}]struct{}
fset *token.FileSet
issues []Issue
}
type Linter struct {
initLenMustBeZero bool
}
func NewLinter(initialLengthMustBeZero bool) *Linter {
return &Linter{
initLenMustBeZero: initialLengthMustBeZero,
}
}
func (l Linter) Run(fset *token.FileSet, info *types.Info, nodes ...ast.Node) ([]Issue, error) {
var issues []Issue // nolint:prealloc // don't know how many there will be
for _, node := range nodes {
var comments []*ast.CommentGroup
if file, ok := node.(*ast.File); ok {
comments = file.Comments
}
visitor := visitor{
nonZeroLengthSliceDecls: make(map[interface{}]struct{}),
initLenMustBeZero: l.initLenMustBeZero,
info: info,
fset: fset,
comments: comments,
}
ast.Walk(&visitor, node)
issues = append(issues, visitor.issues...)
}
return issues, nil
}
func (v *visitor) Visit(node ast.Node) ast.Visitor {
switch node := node.(type) {
case *ast.CallExpr:
fun, ok := node.Fun.(*ast.Ident)
if !ok || fun.Name != "append" {
break
}
if sliceIdent, ok := node.Args[0].(*ast.Ident); ok &&
v.hasNonZeroInitialLength(sliceIdent) &&
!v.hasNoLintOnSameLine(fun) {
v.issues = append(v.issues, AppendIssue{name: sliceIdent.Name, position: v.fset.Position(fun.Pos())})
}
case *ast.AssignStmt:
for i, right := range node.Rhs {
if right, ok := right.(*ast.CallExpr); ok {
fun, ok := right.Fun.(*ast.Ident)
if !ok || fun.Name != "make" {
continue
}
left := node.Lhs[i]
if len(right.Args) == 2 {
// ignore if not a slice or it has explicit zero length
if !v.isSlice(right.Args[0]) {
break
} else if lit, ok := right.Args[1].(*ast.BasicLit); ok && lit.Kind == token.INT && lit.Value == "0" {
break
}
if v.initLenMustBeZero && !v.hasNoLintOnSameLine(fun) {
v.issues = append(v.issues, MustHaveNonZeroInitLenIssue{
name: v.textFor(left),
position: v.fset.Position(node.Pos()),
})
}
v.recordNonZeroLengthSlices(left)
}
}
}
}
return v
}
func (v *visitor) textFor(node ast.Node) string {
typeBuf := new(bytes.Buffer)
if err := printer.Fprint(typeBuf, v.fset, node); err != nil {
log.Fatalf("ERROR: unable to print type: %s", err)
}
return typeBuf.String()
}
func (v *visitor) hasNonZeroInitialLength(ident *ast.Ident) bool {
if ident.Obj == nil {
log.Printf("WARNING: could not determine with %q at %s is a slice (missing object type)",
ident.Name, v.fset.Position(ident.Pos()).String())
return false
}
_, exists := v.nonZeroLengthSliceDecls[ident.Obj.Decl]
return exists
}
func (v *visitor) recordNonZeroLengthSlices(node ast.Node) {
ident, ok := node.(*ast.Ident)
if !ok {
return
}
if ident.Obj == nil {
return
}
v.nonZeroLengthSliceDecls[ident.Obj.Decl] = struct{}{}
}
func (v *visitor) isSlice(node ast.Node) bool {
// determine type if this is a user-defined type
if ident, ok := node.(*ast.Ident); ok {
obj := ident.Obj
if obj == nil {
if v.info != nil {
_, ok := v.info.ObjectOf(ident).Type().(*types.Slice)
return ok
}
return false
}
spec, ok := obj.Decl.(*ast.TypeSpec)
if !ok {
return false
}
node = spec.Type
}
if node, ok := node.(*ast.ArrayType); ok {
return node.Len == nil // only slices have zero length
}
return false
}
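// hasNoLintOnSameLine reports whether a "nozero" comment on the same line as
// the node suppresses the finding.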
func (v *visitor) hasNoLintOnSameLine(node ast.Node) bool {
var nolint = regexp.MustCompile(`^\s*nozero\b`)
nodePos := v.fset.Position(node.Pos())
for _, c := range v.comments {
commentPos := v.fset.Position(c.Pos())
if commentPos.Line == nodePos.Line && nolint.MatchString(c.Text()) {
return true
}
}
return false
}

21
vendor/github.com/bkielbasa/cyclop/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2020 Bartłomiej Klimczak
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View file

@ -0,0 +1,104 @@
package analyzer
import (
"flag"
"go/ast"
"go/token"
"strings"
"golang.org/x/tools/go/analysis"
)
var (
flagSet flag.FlagSet
)
var maxComplexity int
var packageAverage float64
var skipTests bool
func NewAnalyzer() *analysis.Analyzer {
return &analysis.Analyzer{
Name: "cyclop",
Doc: "calculates cyclomatic complexity",
Run: run,
Flags: flagSet,
}
}
func init() {
flagSet.IntVar(&maxComplexity, "maxComplexity", 10, "max complexity the function can have")
flagSet.Float64Var(&packageAverage, "packageAverage", 0, "max average complexity in package")
flagSet.BoolVar(&skipTests, "skipTests", false, "should the linter execute on test files as well")
}
func run(pass *analysis.Pass) (interface{}, error) {
var sum, count float64
var pkgName string
var pkgPos token.Pos
for _, f := range pass.Files {
ast.Inspect(f, func(node ast.Node) bool {
f, ok := node.(*ast.FuncDecl)
if !ok {
if node == nil {
return true
}
if file, ok := node.(*ast.File); ok {
pkgName = file.Name.Name
pkgPos = node.Pos()
}
// we check function by function
return true
}
if skipTests && testFunc(f) {
return true
}
count++
comp := complexity(f)
sum += float64(comp)
if comp > maxComplexity {
pass.Reportf(node.Pos(), "calculated cyclomatic complexity for function %s is %d, max is %d", f.Name.Name, comp, maxComplexity)
}
return true
})
}
if packageAverage > 0 {
avg := sum / count
if avg > packageAverage {
pass.Reportf(pkgPos, "the avarage complexity for the package %s is %f, max is %f", pkgName, avg, packageAverage)
}
}
return nil, nil
}
func testFunc(f *ast.FuncDecl) bool {
return strings.HasPrefix(f.Name.Name, "Test")
}
func complexity(fn *ast.FuncDecl) int {
v := complexityVisitor{}
ast.Walk(&v, fn)
return v.Complexity
}
type complexityVisitor struct {
Complexity int
}
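// Visit adds one point for the function declaration itself and one for every
// branching construct (if, for, range, case and comm clauses) and for each
// short-circuit && or || operator.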
func (v *complexityVisitor) Visit(n ast.Node) ast.Visitor {
switch n := n.(type) {
case *ast.FuncDecl, *ast.IfStmt, *ast.ForStmt, *ast.RangeStmt, *ast.CaseClause, *ast.CommClause:
v.Complexity++
case *ast.BinaryExpr:
if n.Op == token.LAND || n.Op == token.LOR {
v.Complexity++
}
}
return v
}

70
vendor/github.com/bombsimon/wsl/v3/.gitignore generated vendored Normal file
View file

@ -0,0 +1,70 @@
# Created by https://www.gitignore.io/api/go,vim,macos
### Go ###
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, build with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
### Go Patch ###
/vendor/
/Godeps/
### macOS ###
# General
.DS_Store
.AppleDouble
.LSOverride
# Icon must end with two \r
Icon
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
### Vim ###
# Swap
[._]*.s[a-v][a-z]
[._]*.sw[a-p]
[._]s[a-rt-v][a-z]
[._]ss[a-gi-z]
[._]sw[a-p]
# Session
Session.vim
# Temporary
.netrwhist
*~
# Auto-generated tag files
tags
# Persistent undo
[._]*.un~
# End of https://www.gitignore.io/api/go,vim,macos

25
vendor/github.com/bombsimon/wsl/v3/.travis.yml generated vendored Normal file
View file

@ -0,0 +1,25 @@
---
language: go
go:
- 1.13.x
- 1.12.x
- 1.11.x
env:
global:
- GO111MODULE=on
install:
- go get -v golang.org/x/tools/cmd/cover github.com/mattn/goveralls
script:
- go test -v -covermode=count -coverprofile=coverage.out
after_script:
- $HOME/gopath/bin/goveralls -coverprofile=coverage.out -service=travis-ci
notifications:
email: false
# vim: set ts=2 sw=2 et:

21
vendor/github.com/bombsimon/wsl/v3/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2018 Simon Sawert
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

126
vendor/github.com/bombsimon/wsl/v3/README.md generated vendored Normal file
View file

@ -0,0 +1,126 @@
# WSL - Whitespace Linter
[![forthebadge](https://forthebadge.com/images/badges/made-with-go.svg)](https://forthebadge.com)
[![forthebadge](https://forthebadge.com/images/badges/built-with-love.svg)](https://forthebadge.com)
[![Build Status](https://travis-ci.org/bombsimon/wsl.svg?branch=master)](https://travis-ci.org/bombsimon/wsl)
[![Coverage Status](https://coveralls.io/repos/github/bombsimon/wsl/badge.svg?branch=master)](https://coveralls.io/github/bombsimon/wsl?branch=master)
WSL is a linter that enforces a very **non-scientific** vision of how to make
code more readable by enforcing empty lines in the right places.
I think too much code out there is too cuddly and a bit too warm for its own
good, making it harder for other people to read and understand. The linter will
warn about newlines in and around blocks, at the beginning of files and other
places in the code.
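A minimal, made-up sketch of the kind of cuddling that triggers warnings (the exact rules are listed further down):
```go
package example

import "fmt"

// Cuddled: the if statement sits directly on an assignment it does not use,
// and the final expression is cuddled with the closing block.
func cuddled(names []string) {
	counter := 0
	if len(names) == 0 {
		fmt.Println("no names")
	}
	fmt.Println(counter)
}

// Spaced: empty lines separate the unrelated assignment from the if block
// and the trailing expression.
func spaced(names []string) {
	counter := 0

	if len(names) == 0 {
		fmt.Println("no names")
	}

	fmt.Println(counter)
}
```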
**I know this linter is aggressive** and a lot of projects I've tested it on
have failed miserably. For this linter to be useful at all I want to be open to
new ideas, configurations and discussions! Also note that some of the warnings
might be bugs or unintentional false positives so I would love an
[issue](https://github.com/bombsimon/wsl/issues/new) to fix, discuss, change or
make something configurable!
## Installation
### By `go get` (local installation)
You can do that by using:
```sh
go get -u github.com/bombsimon/wsl/cmd/...
```
### By golangci-lint (CI automation)
`wsl` is already integrated with
[golangci-lint](https://github.com/golangci/golangci-lint). Please refer to the
instructions there.
## Usage
How to use depends on how you install `wsl`.
### With local binary
The general command format for `wsl` is:
```sh
$ wsl [flags] <file1> [files...]
$ wsl [flags] </path/to/package/...>
# Examples
$ wsl ./main.go
$ wsl --no-test ./main.go
$ wsl --allow-cuddle-declarations ./main.go
$ wsl --no-test --allow-cuddle-declarations ./main.go
$ wsl --no-test --allow-trailing-comment ./myProject/...
```
The "..." wildcard is not used like other `go` commands but instead can only
be to a relative or absolute path.
By default, the linter will run on `./...` which means all go files in the
current path and all subsequent paths, including test files. To disable linting
test files, use `-n` or `--no-test`.
### By `golangci-lint` (CI automation)
The recommended command is:
```sh
golangci-lint run --disable-all --enable wsl
```
For more information, please refer to
[golangci-lint](https://github.com/golangci/golangci-lint)'s documentation.
## Issues and configuration
The linter supports a few ways to configure it to satisfy more than one kind of
code style. These settings can be set either with flags or with YAML
configuration if used via `golangci-lint`.
The supported configuration can be found [in the documentation](doc/configuration.md).
Below is the checklist of available checks for any hit from `wsl`. If you do not see yours,
feel free to raise an [issue](https://github.com/bombsimon/wsl/issues/new).
> **Note**: this linter doesn't take into consideration the issues that will be
> fixed with `go fmt -s`, so ensure that the code is properly formatted before
> use.
* [Anonymous switch statements should never be cuddled](doc/rules.md#anonymous-switch-statements-should-never-be-cuddled)
* [Append only allowed to cuddle with appended value](doc/rules.md#append-only-allowed-to-cuddle-with-appended-value)
* [Assignments should only be cuddled with other assignments](doc/rules.md#assignments-should-only-be-cuddled-with-other-assignments)
* [Block should not end with a whitespace (or comment)](doc/rules.md#block-should-not-end-with-a-whitespace-or-comment)
* [Block should not start with a whitespace](doc/rules.md#block-should-not-start-with-a-whitespace)
* [Case block should end with newline at this size](doc/rules.md#case-block-should-end-with-newline-at-this-size)
* [Branch statements should not be cuddled if block has more than two lines](doc/rules.md#branch-statements-should-not-be-cuddled-if-block-has-more-than-two-lines)
* [Declarations should never be cuddled](doc/rules.md#declarations-should-never-be-cuddled)
* [Defer statements should only be cuddled with expressions on same variable](doc/rules.md#defer-statements-should-only-be-cuddled-with-expressions-on-same-variable)
* [Expressions should not be cuddled with blocks](doc/rules.md#expressions-should-not-be-cuddled-with-blocks)
* [Expressions should not be cuddled with declarations or returns](doc/rules.md#expressions-should-not-be-cuddled-with-declarations-or-returns)
* [For statement without condition should never be cuddled](doc/rules.md#for-statement-without-condition-should-never-be-cuddled)
* [For statements should only be cuddled with assignments used in the iteration](doc/rules.md#for-statements-should-only-be-cuddled-with-assignments-used-in-the-iteration)
* [Go statements can only invoke functions assigned on line above](doc/rules.md#go-statements-can-only-invoke-functions-assigned-on-line-above)
* [If statements should only be cuddled with assignments](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments)
* [If statements should only be cuddled with assignments used in the if
statement
itself](doc/rules.md#if-statements-should-only-be-cuddled-with-assignments-used-in-the-if-statement-itself)
* [If statements that check an error must be cuddled with the statement that assigned the error](doc/rules.md#if-statements-that-check-an-error-must-be-cuddled-with-the-statement-that-assigned-the-error)
* [Only cuddled expressions if assigning variable or using from line
above](doc/rules.md#only-cuddled-expressions-if-assigning-variable-or-using-from-line-above)
* [Only one cuddle assignment allowed before defer statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-defer-statement)
* [Only one cuddle assignment allowed before for statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-for-statement)
* [Only one cuddle assignment allowed before go statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-go-statement)
* [Only one cuddle assignment allowed before if statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-if-statement)
* [Only one cuddle assignment allowed before range statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-range-statement)
* [Only one cuddle assignment allowed before switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-switch-statement)
* [Only one cuddle assignment allowed before type switch statement](doc/rules.md#only-one-cuddle-assignment-allowed-before-type-switch-statement)
* [Ranges should only be cuddled with assignments used in the iteration](doc/rules.md#ranges-should-only-be-cuddled-with-assignments-used-in-the-iteration)
* [Return statements should not be cuddled if block has more than two lines](doc/rules.md#return-statements-should-not-be-cuddled-if-block-has-more-than-two-lines)
* [Short declarations should cuddle only with other short declarations](doc/rules.md#short-declaration-should-cuddle-only-with-other-short-declarations)
* [Switch statements should only be cuddled with variables switched](doc/rules.md#switch-statements-should-only-be-cuddled-with-variables-switched)
* [Type switch statements should only be cuddled with variables switched](doc/rules.md#type-switch-statements-should-only-be-cuddled-with-variables-switched)

12
vendor/github.com/bombsimon/wsl/v3/go.mod generated vendored Normal file
View file

@ -0,0 +1,12 @@
module github.com/bombsimon/wsl/v3
go 1.12
require (
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/stretchr/testify v1.5.1
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
gopkg.in/yaml.v2 v2.2.8 // indirect
)

25
vendor/github.com/bombsimon/wsl/v3/go.sum generated vendored Normal file
View file

@ -0,0 +1,25 @@
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

1247
vendor/github.com/bombsimon/wsl/v3/wsl.go generated vendored Normal file

File diff suppressed because it is too large Load diff

1
vendor/github.com/charithe/durationcheck/.gitignore generated vendored Normal file
View file

@ -0,0 +1 @@
/durationcheck

201
vendor/github.com/charithe/durationcheck/LICENSE generated vendored Normal file
View file

@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright [yyyy] [name of copyright owner]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

5
vendor/github.com/charithe/durationcheck/Makefile generated vendored Normal file
View file

@ -0,0 +1,5 @@
build:
@GO111MODULE=on go build -ldflags '-s -w' -o durationcheck ./cmd/durationcheck/main.go
install:
@GO111MODULE=on go install -ldflags '-s -w' ./cmd/durationcheck

48
vendor/github.com/charithe/durationcheck/README.md generated vendored Normal file
View file

@ -0,0 +1,48 @@
[![CircleCI](https://circleci.com/gh/charithe/durationcheck.svg?style=svg)](https://circleci.com/gh/charithe/durationcheck)
Duration Check
===============
A Go linter to detect cases where two `time.Duration` values are being multiplied in possibly erroneous ways.
For example, consider the following (highly contrived) function:
```go
func waitFor(someDuration time.Duration) {
timeToWait := someDuration * time.Second
time.Sleep(timeToWait)
}
```
Although the above code would compile without any errors, its runtime behaviour would almost certainly be incorrect.
A caller would reasonably expect `waitFor(5 * time.Second)` to wait for ~5 seconds, but they would actually end up
waiting for ~1,388,889 hours.
The above example is for illustration purposes only. The problem is glaringly obvious in such a simple function, and even the greenest Gopher would spot it immediately. In a much longer and more complicated function, however, such logic errors can easily go unnoticed.
See the [test cases](testdata/src/a/a.go) for more examples of the types of errors detected by the linter.
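For contrast, here is a minimal sketch of a pattern the linter accepts (the function and variable names are illustrative): the count is kept as a plain integer and only converted to `time.Duration` at the point of use, which the linter treats as an acceptable cast rather than a duration-times-duration multiplication.
```go
package main

import "time"

// waitSeconds sleeps for n seconds. n is a plain int, so
// time.Duration(n) * time.Second is an intentional unit conversion
// and is not reported by the linter.
func waitSeconds(n int) {
	time.Sleep(time.Duration(n) * time.Second)
}

func main() {
	waitSeconds(2)
}
```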
Installation
-------------
Requires Go 1.11 or above.
```
go get -u github.com/charithe/durationcheck/cmd/durationcheck
```
Usage
-----
Invoke `durationcheck` with your package name
```
durationcheck ./...
# or
durationcheck github.com/you/yourproject/...
```

View file

@ -0,0 +1,188 @@
package durationcheck
import (
"bytes"
"fmt"
"go/ast"
"go/format"
"go/token"
"go/types"
"log"
"os"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/inspect"
"golang.org/x/tools/go/ast/inspector"
)
var Analyzer = &analysis.Analyzer{
Name: "durationcheck",
Doc: "check for two durations multiplied together",
Run: run,
Requires: []*analysis.Analyzer{inspect.Analyzer},
}
func run(pass *analysis.Pass) (interface{}, error) {
// if the package does not import time, it can be skipped from analysis
if !hasImport(pass.Pkg, "time") {
return nil, nil
}
inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
nodeTypes := []ast.Node{
(*ast.BinaryExpr)(nil),
}
inspect.Preorder(nodeTypes, check(pass))
return nil, nil
}
func hasImport(pkg *types.Package, importPath string) bool {
for _, imp := range pkg.Imports() {
if imp.Path() == importPath {
return true
}
}
return false
}
// check contains the logic for checking that time.Duration is used correctly in the code being analysed
func check(pass *analysis.Pass) func(ast.Node) {
return func(node ast.Node) {
expr := node.(*ast.BinaryExpr)
// we are only interested in multiplication
if expr.Op != token.MUL {
return
}
// get the types of the two operands
x, xOK := pass.TypesInfo.Types[expr.X]
y, yOK := pass.TypesInfo.Types[expr.Y]
if !xOK || !yOK {
return
}
if isDuration(x.Type) && isDuration(y.Type) {
// check that both sides are acceptable expressions
if isUnacceptableExpr(pass, expr.X) && isUnacceptableExpr(pass, expr.Y) {
pass.Reportf(expr.Pos(), "Multiplication of durations: `%s`", formatNode(expr))
}
}
}
}
func isDuration(x types.Type) bool {
return x.String() == "time.Duration" || x.String() == "*time.Duration"
}
// isUnacceptableExpr returns true if the argument is not an acceptable time.Duration expression
func isUnacceptableExpr(pass *analysis.Pass, expr ast.Expr) bool {
switch e := expr.(type) {
case *ast.BasicLit:
return false
case *ast.Ident:
return !isAcceptableNestedExpr(pass, e)
case *ast.CallExpr:
return !isAcceptableCast(pass, e)
case *ast.BinaryExpr:
return !isAcceptableNestedExpr(pass, e)
case *ast.UnaryExpr:
return !isAcceptableNestedExpr(pass, e)
case *ast.SelectorExpr:
return !isAcceptableNestedExpr(pass, e)
case *ast.StarExpr:
return !isAcceptableNestedExpr(pass, e)
case *ast.ParenExpr:
return !isAcceptableNestedExpr(pass, e)
case *ast.IndexExpr:
return !isAcceptableNestedExpr(pass, e)
default:
return true
}
}
// isAcceptableCast returns true if the argument is an acceptable expression cast to time.Duration
func isAcceptableCast(pass *analysis.Pass, e *ast.CallExpr) bool {
// check that there's a single argument
if len(e.Args) != 1 {
return false
}
// check that the argument is acceptable
if !isAcceptableNestedExpr(pass, e.Args[0]) {
return false
}
// check for time.Duration cast
selector, ok := e.Fun.(*ast.SelectorExpr)
if !ok {
return false
}
return isDurationCast(selector)
}
func isDurationCast(selector *ast.SelectorExpr) bool {
pkg, ok := selector.X.(*ast.Ident)
if !ok {
return false
}
if pkg.Name != "time" {
return false
}
return selector.Sel.Name == "Duration"
}
func isAcceptableNestedExpr(pass *analysis.Pass, n ast.Expr) bool {
switch e := n.(type) {
case *ast.BasicLit:
return true
case *ast.BinaryExpr:
return isAcceptableNestedExpr(pass, e.X) && isAcceptableNestedExpr(pass, e.Y)
case *ast.UnaryExpr:
return isAcceptableNestedExpr(pass, e.X)
case *ast.Ident:
return isAcceptableIdent(pass, e)
case *ast.CallExpr:
t := pass.TypesInfo.TypeOf(e)
return !isDuration(t)
case *ast.SelectorExpr:
return isAcceptableNestedExpr(pass, e.X) && isAcceptableIdent(pass, e.Sel)
case *ast.StarExpr:
return isAcceptableNestedExpr(pass, e.X)
case *ast.ParenExpr:
return isAcceptableNestedExpr(pass, e.X)
case *ast.IndexExpr:
t := pass.TypesInfo.TypeOf(e)
return !isDuration(t)
default:
return false
}
}
func isAcceptableIdent(pass *analysis.Pass, ident *ast.Ident) bool {
obj := pass.TypesInfo.ObjectOf(ident)
return !isDuration(obj.Type())
}
func formatNode(node ast.Node) string {
buf := new(bytes.Buffer)
if err := format.Node(buf, token.NewFileSet(), node); err != nil {
log.Printf("Error formatting expression: %v", err)
return ""
}
return buf.String()
}
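// printAST is a debugging helper: it prints the node formatted as Go source,
// followed by a dump of its AST structure.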
func printAST(msg string, node ast.Node) {
fmt.Printf(">>> %s:\n%s\n\n\n", msg, formatNode(node))
ast.Fprint(os.Stdout, nil, node, nil)
fmt.Println("--------------")
}

5
vendor/github.com/charithe/durationcheck/go.mod generated vendored Normal file
View file

@ -0,0 +1,5 @@
module github.com/charithe/durationcheck
go 1.14
require golang.org/x/tools v0.1.0

26
vendor/github.com/charithe/durationcheck/go.sum generated vendored Normal file
View file

@ -0,0 +1,26 @@
github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4=
golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4 h1:myAQVi0cGEoqQVR5POX+8RR2mrocKqNN1hmeMqhX27k=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.0 h1:po9/4sTYwZU9lPhi1tOrb4hCv3qrhiQ77LZfGa2OjwY=
golang.org/x/tools v0.1.0/go.mod h1:xkSsbof2nBLbhDlRMhhhyNLN/zl3eTqcnHD5viDpcZ0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=

3
vendor/github.com/chavacava/garif/.gitignore generated vendored Normal file
View file

@ -0,0 +1,3 @@
*.test
*.out
.devcontainer/

21
vendor/github.com/chavacava/garif/LICENSE generated vendored Normal file
View file

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 Salvador Cavadini
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

52
vendor/github.com/chavacava/garif/README.md generated vendored Normal file
View file

@ -0,0 +1,52 @@
# garif
A Go package to create and manipulate SARIF logs.
SARIF, from _Static Analysis Results Interchange Format_, is a standard JSON-based format for the output of static analysis tools defined and promoted by [OASIS](https://www.oasis-open.org/).
The currently supported version of the standard is [SARIF-v2.1.0](https://docs.oasis-open.org/sarif/sarif/v2.1.0/csprd01/sarif-v2.1.0-csprd01.html).
## Usage
The package provides access to every element of the SARIF model, so you are free to manipulate it in full detail.
The package also provides constructor functions (`New...`) and decorator methods (`With...`) that simplify the creation of SARIF files for common use cases.
Using these constructors and decorators, we can easily recreate the example SARIF file from the [Microsoft SARIF pages](https://github.com/microsoft/sarif-tutorials/blob/master/docs/1-Introduction.md):
```go
import "github.com/chavacava/garif"
// ...
rule := garif.NewRule("no-unused-vars").
WithHelpUri("https://eslint.org/docs/rules/no-unused-vars").
WithShortDescription("disallow unused variables").
WithProperties("category", "Variables")
driver := garif.NewDriver("ESLint").
WithInformationUri("https://eslint.org").
WithRules(rule)
run := garif.NewRun(garif.NewTool(driver)).
WithArtifactsURIs("file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js")
run.WithResult(rule.Id, "'x' is assigned a value but never used.", "file:///C:/dev/sarif/sarif-tutorials/samples/Introduction/simple-example.js", 1, 5)
logFile := garif.NewLogFile([]*garif.Run{run}, garif.Version210)
logFile.Write(os.Stdout)
```
## Why this package?
This package was started while I was working on adding a SARIF output formatter to [`revive`](https://github.com/mgechev/revive).
I tried to use [go-sarif](https://github.com/owenrumney/go-sarif) by [Owen Rumney](https://github.com/owenrumney), but it is too focused on the use case of the static analyzer [tfsec](https://tfsec.dev), so I decided to create a package flexible enough to generate SARIF files for broader use cases.
## More information about SARIF
For more information about SARIF, you can visit the [Oasis Open](https://www.oasis-open.org/committees/tc_home.php?wg_abbrev=sarif) site.
## Contributing
Of course, contributions are welcome!

338
vendor/github.com/chavacava/garif/constructors.go generated vendored Normal file
View file

@ -0,0 +1,338 @@
package garif
// NewAddress creates a valid Address
func NewAddress() *Address {
return &Address{}
}
// NewArtifact creates a valid Artifact
func NewArtifact() *Artifact {
return &Artifact{}
}
// NewArtifactChange creates a valid ArtifactChange
func NewArtifactChange(location *ArtifactLocation, replacements ...*Replacement) *ArtifactChange {
return &ArtifactChange{
ArtifactLocation: location,
Replacements: replacements,
}
}
// NewArtifactContent creates a valid ArtifactContent
func NewArtifactContent() *ArtifactContent {
return &ArtifactContent{}
}
// NewArtifactLocation creates a valid ArtifactLocation
func NewArtifactLocation() *ArtifactLocation {
return &ArtifactLocation{}
}
// NewAttachment creates a valid Attachment
func NewAttachment(location *ArtifactLocation) *Attachment {
return &Attachment{ArtifactLocation: location}
}
// NewCodeFlow creates a valid CodeFlow
func NewCodeFlow(threadFlows ...*ThreadFlow) *CodeFlow {
return &CodeFlow{ThreadFlows: threadFlows}
}
// NewConfigurationOverride creates a valid ConfigurationOverride
func NewConfigurationOverride(configuration *ReportingConfiguration, descriptor *ReportingDescriptorReference) *ConfigurationOverride {
return &ConfigurationOverride{
Configuration: configuration,
Descriptor: descriptor,
}
}
// NewConversion creates a valid Conversion
func NewConversion(tool *Tool) *Conversion {
return &Conversion{Tool: tool}
}
// NewEdge creates a valid Edge
func NewEdge(id, sourceNodeId, targetNodeId string) *Edge {
return &Edge{
Id: id,
SourceNodeId: sourceNodeId,
TargetNodeId: targetNodeId,
}
}
// NewEdgeTraversal creates a valid EdgeTraversal
func NewEdgeTraversal(edgeId string) *EdgeTraversal {
return &EdgeTraversal{
EdgeId: edgeId,
}
}
// NewException creates a valid Exception
func NewException() *Exception {
return &Exception{}
}
// NewExternalProperties creates a valid ExternalProperties
func NewExternalProperties() *ExternalProperties {
return &ExternalProperties{}
}
// NewExternalPropertyFileReference creates a valid ExternalPropertyFileReference
func NewExternalPropertyFileReference() *ExternalPropertyFileReference {
return &ExternalPropertyFileReference{}
}
// NewExternalPropertyFileReferences creates a valid ExternalPropertyFileReferences
func NewExternalPropertyFileReferences() *ExternalPropertyFileReferences {
return &ExternalPropertyFileReferences{}
}
// NewFix creates a valid Fix
func NewFix(artifactChanges ...*ArtifactChange) *Fix {
return &Fix{
ArtifactChanges: artifactChanges,
}
}
// NewGraph creates a valid Graph
func NewGraph() *Graph {
return &Graph{}
}
// NewGraphTraversal creates a valid GraphTraversal
func NewGraphTraversal() *GraphTraversal {
return &GraphTraversal{}
}
// NewInvocation creates a valid Invocation
func NewInvocation(executionSuccessful bool) *Invocation {
return &Invocation{
ExecutionSuccessful: executionSuccessful,
}
}
// NewLocation creates a valid Location
func NewLocation() *Location {
return &Location{}
}
// NewLocationRelationship creates a valid LocationRelationship
func NewLocationRelationship(target int) *LocationRelationship {
return &LocationRelationship{
Target: target,
}
}
type LogFileVersion string
const Version210 LogFileVersion = "2.1.0"
// NewLogFile creates a valid LogFile
func NewLogFile(runs []*Run, version LogFileVersion) *LogFile {
return &LogFile{
Runs: runs,
Version: version,
}
}
// NewLogicalLocation creates a valid LogicalLocation
func NewLogicalLocation() *LogicalLocation {
return &LogicalLocation{}
}
// NewMessage creates a valid Message
func NewMessage() *Message {
return &Message{}
}
// NewMessageFromText creates a valid Message with the given text
func NewMessageFromText(text string) *Message {
return &Message{
Text: text,
}
}
// NewMultiformatMessageString creates a valid MultiformatMessageString
func NewMultiformatMessageString(text string) *MultiformatMessageString {
return &MultiformatMessageString{
Text: text,
}
}
// NewNode creates a valid Node
func NewNode(id string) *Node {
return &Node{
Id: id,
}
}
// NewNotification creates a valid Notification
func NewNotification(message *Message) *Notification {
return &Notification{
Message: message,
}
}
// NewPhysicalLocation creates a valid PhysicalLocation
func NewPhysicalLocation() *PhysicalLocation {
return &PhysicalLocation{}
}
// NewPropertyBag creates a valid PropertyBag
func NewPropertyBag() *PropertyBag {
return &PropertyBag{}
}
// NewRectangle creates a valid Rectangle
func NewRectangle() *Rectangle {
return &Rectangle{}
}
// NewRegion creates a valid Region
func NewRegion() *Region {
return &Region{}
}
// NewReplacement creates a valid Replacement
func NewReplacement(deletedRegion *Region) *Replacement {
return &Replacement{
DeletedRegion: deletedRegion,
}
}
// NewReportingConfiguration creates a valid ReportingConfiguration
func NewReportingConfiguration() *ReportingConfiguration {
return &ReportingConfiguration{}
}
// NewReportingDescriptor creates a valid ReportingDescriptor
func NewReportingDescriptor(id string) *ReportingDescriptor {
return &ReportingDescriptor{
Id: id,
}
}
// NewRule is an alias for NewReportingDescriptor
func NewRule(id string) *ReportingDescriptor {
return NewReportingDescriptor(id)
}
// NewReportingDescriptorReference creates a valid ReportingDescriptorReference
func NewReportingDescriptorReference() *ReportingDescriptorReference {
return &ReportingDescriptorReference{}
}
// NewReportingDescriptorRelationship creates a valid ReportingDescriptorRelationship
func NewReportingDescriptorRelationship(target *ReportingDescriptorReference) *ReportingDescriptorRelationship {
return &ReportingDescriptorRelationship{
Target: target,
}
}
// NewResult creates a valid Result
func NewResult(message *Message) *Result {
return &Result{
Message: message,
}
}
// NewResultProvenance creates a valid ResultProvenance
func NewResultProvenance() *ResultProvenance {
return &ResultProvenance{}
}
// NewRun creates a valid Run
func NewRun(tool *Tool) *Run {
return &Run{
Tool: tool,
}
}
// NewRunAutomationDetails creates a valid RunAutomationDetails
func NewRunAutomationDetails() *RunAutomationDetails {
return &RunAutomationDetails{}
}
// NewSpecialLocations creates a valid SpecialLocations
func NewSpecialLocations() *SpecialLocations {
return &SpecialLocations{}
}
// NewStack creates a valid Stack
func NewStack(frames ...*StackFrame) *Stack {
return &Stack{
Frames: frames,
}
}
// NewStackFrame creates a valid StackFrame
func NewStackFrame() *StackFrame {
return &StackFrame{}
}
// NewSuppression creates a valid Suppression
func NewSuppression(kind string) *Suppression {
return &Suppression{
Kind: kind,
}
}
// NewThreadFlow creates a valid ThreadFlow
func NewThreadFlow(locations []*ThreadFlowLocation) *ThreadFlow {
return &ThreadFlow{
Locations: locations,
}
}
// NewThreadFlowLocation creates a valid ThreadFlowLocation
func NewThreadFlowLocation() *ThreadFlowLocation {
return &ThreadFlowLocation{}
}
// NewTool creates a valid Tool
func NewTool(driver *ToolComponent) *Tool {
return &Tool{
Driver: driver,
}
}
// NewToolComponent creates a valid ToolComponent
func NewToolComponent(name string) *ToolComponent {
return &ToolComponent{
Name: name,
}
}
// NewDriver is an alias for NewToolComponent
func NewDriver(name string) *ToolComponent {
return NewToolComponent(name)
}
// NewToolComponentReference creates a valid ToolComponentReference
func NewToolComponentReference() *ToolComponentReference {
return &ToolComponentReference{}
}
// NewTranslationMetadata creates a valid TranslationMetadata
func NewTranslationMetadata(name string) *TranslationMetadata {
return &TranslationMetadata{
Name: name,
}
}
// NewVersionControlDetails creates a valid VersionControlDetails
func NewVersionControlDetails(repositoryUri string) *VersionControlDetails {
return &VersionControlDetails{
RepositoryUri: repositoryUri,
}
}
// NewWebRequest creates a valid WebRequest
func NewWebRequest() *WebRequest {
return &WebRequest{}
}
// NewWebResponse creates a valid WebResponse
func NewWebResponse() *WebResponse {
return &WebResponse{}
}

94
vendor/github.com/chavacava/garif/decorators.go generated vendored Normal file
View file

@ -0,0 +1,94 @@
package garif
// WithLineColumn sets a physical location with the given line and column
func (l *Location) WithLineColumn(line, column int) *Location {
if l.PhysicalLocation == nil {
l.PhysicalLocation = NewPhysicalLocation()
}
l.PhysicalLocation.Region = NewRegion()
l.PhysicalLocation.Region.StartLine = line
l.PhysicalLocation.Region.StartColumn = column
return l
}
// WithURI sets a physical location with the given URI
func (l *Location) WithURI(uri string) *Location {
if l.PhysicalLocation == nil {
l.PhysicalLocation = NewPhysicalLocation()
}
l.PhysicalLocation.ArtifactLocation = NewArtifactLocation()
l.PhysicalLocation.ArtifactLocation.Uri = uri
return l
}
// WithKeyValue sets (overwrites) the value of the given key
func (b PropertyBag) WithKeyValue(key string, value interface{}) PropertyBag {
b[key] = value
return b
}
// WithHelpUri sets the help URI for this ReportingDescriptor
func (r *ReportingDescriptor) WithHelpUri(uri string) *ReportingDescriptor {
r.HelpUri = uri
return r
}
// WithProperties adds the key & value to the properties of this ReportingDescriptor
func (r *ReportingDescriptor) WithProperties(key string, value interface{}) *ReportingDescriptor {
if r.Properties == nil {
r.Properties = NewPropertyBag()
}
r.Properties.WithKeyValue(key, value)
return r
}
// WithArtifactsURIs adds the given URIs as artifacts of this Run
func (r *Run) WithArtifactsURIs(uris ...string) *Run {
if r.Artifacts == nil {
r.Artifacts = []*Artifact{}
}
for _, uri := range uris {
a := NewArtifact()
a.Location = NewArtifactLocation()
a.Location.Uri = uri
r.Artifacts = append(r.Artifacts, a)
}
return r
}
// WithResult adds a result to this Run
func (r *Run) WithResult(ruleId string, message string, uri string, line int, column int) *Run {
if r.Results == nil {
r.Results = []*Result{}
}
msg := NewMessage()
msg.Text = message
result := NewResult(msg)
location := NewLocation().WithURI(uri).WithLineColumn(line, column)
result.Locations = append(result.Locations, location)
result.RuleId = ruleId
r.Results = append(r.Results, result)
return r
}
// WithInformationUri sets the information URI
func (t *ToolComponent) WithInformationUri(uri string) *ToolComponent {
t.InformationUri = uri
return t
}
// WithRules sets (overwrites) the rules
func (t *ToolComponent) WithRules(rules ...*ReportingDescriptor) *ToolComponent {
t.Rules = rules
return t
}

11
vendor/github.com/chavacava/garif/doc.go generated vendored Normal file
View file

@ -0,0 +1,11 @@
// Package garif defines all the GO structures required to model a SARIF log file.
// These structures were created using the JSON-schema sarif-schema-2.1.0.json of SARIF logfiles
// available at https://github.com/oasis-tcs/sarif-spec/tree/master/Schemata.
//
// The package provides constructors for all structures (see constructors.go). These constructors
// ensure that the returned structure instantiation is valid with respect to the JSON schema and
// should be used in place of plain structure instantiation.
// The root structure is LogFile.
//
// The package provides utility decorators for the most commonly used structures (see decorators.go)
package garif

5
vendor/github.com/chavacava/garif/go.mod generated vendored Normal file
View file

@ -0,0 +1,5 @@
module github.com/chavacava/garif
go 1.16
require github.com/stretchr/testify v1.7.0

11
vendor/github.com/chavacava/garif/go.sum generated vendored Normal file
View file

@ -0,0 +1,11 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.7.0 h1:nwc3DEeHmmLAfoZucVR881uASk0Mfjw8xYJ99tb5CcY=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

26
vendor/github.com/chavacava/garif/io.go generated vendored Normal file
View file

@ -0,0 +1,26 @@
package garif
import (
"encoding/json"
"io"
)
// Write writes the JSON
func (l *LogFile) Write(w io.Writer) error {
marshal, err := json.Marshal(l)
if err != nil {
return err
}
_, err = w.Write(marshal)
return err
}
// PrettyWrite writes indented JSON
func (l *LogFile) PrettyWrite(w io.Writer) error {
marshal, err := json.MarshalIndent(l, "", " ")
if err != nil {
return err
}
_, err = w.Write(marshal)
return err
}

1486
vendor/github.com/chavacava/garif/models.go generated vendored Normal file

File diff suppressed because it is too large Load diff

29
vendor/github.com/daixiang0/gci/LICENSE generated vendored Normal file
View file

@ -0,0 +1,29 @@
BSD 3-Clause License
Copyright (c) 2020, Xiang Dai
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

383
vendor/github.com/daixiang0/gci/pkg/gci/gci.go generated vendored Normal file
View file

@ -0,0 +1,383 @@
package gci
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"os/exec"
"path/filepath"
"sort"
"strings"
)
const (
// pkg type: standard, remote, local
standard int = iota
// 3rd-party packages
remote
local
commentFlag = "//"
)
var (
importStartFlag = []byte(`
import (
`)
importEndFlag = []byte(`
)
`)
)
type FlagSet struct {
LocalFlag []string
DoWrite, DoDiff *bool
}
type pkg struct {
list map[int][]string
comment map[string]string
alias map[string]string
}
// ParseLocalFlag takes a comma-separated list of
// package-name-prefixes (as passed to the "-local" flag), and splits
// it into a list. This differs from strings.Split in that it
// handles the empty string and empty entries in the list.
func ParseLocalFlag(str string) []string {
return strings.FieldsFunc(str, func(c rune) bool { return c == ',' })
}
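// For illustration, with hypothetical inputs:
//   ParseLocalFlag("github.com/you/a,,github.com/you/b") -> ["github.com/you/a" "github.com/you/b"]
//   ParseLocalFlag("")                                    -> an empty list (no entries)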
func newPkg(data [][]byte, localFlag []string) *pkg {
listMap := make(map[int][]string)
commentMap := make(map[string]string)
aliasMap := make(map[string]string)
p := &pkg{
list: listMap,
comment: commentMap,
alias: aliasMap,
}
formatData := make([]string, 0)
// remove all empty lines
for _, v := range data {
if len(v) > 0 {
formatData = append(formatData, strings.TrimSpace(string(v)))
}
}
n := len(formatData)
for i := n - 1; i >= 0; i-- {
line := formatData[i]
// check commentFlag:
// 1. one line commentFlag
// 2. commentFlag after import path
commentIndex := strings.Index(line, commentFlag)
if commentIndex == 0 {
// comment in the last line is useless, ignore it
if i+1 >= n {
continue
}
pkg, _, _ := getPkgInfo(formatData[i+1], strings.Index(formatData[i+1], commentFlag) >= 0)
p.comment[pkg] = line
continue
} else if commentIndex > 0 {
pkg, alias, comment := getPkgInfo(line, true)
if alias != "" {
p.alias[pkg] = alias
}
p.comment[pkg] = comment
pkgType := getPkgType(pkg, localFlag)
p.list[pkgType] = append(p.list[pkgType], pkg)
continue
}
pkg, alias, _ := getPkgInfo(line, false)
if alias != "" {
p.alias[pkg] = alias
}
pkgType := getPkgType(pkg, localFlag)
p.list[pkgType] = append(p.list[pkgType], pkg)
}
return p
}
// fmt formats the import pkgs as expected
func (p *pkg) fmt() []byte {
ret := make([]string, 0, 100)
for pkgType := range []int{standard, remote, local} {
sort.Strings(p.list[pkgType])
for _, s := range p.list[pkgType] {
if p.comment[s] != "" {
l := fmt.Sprintf("%s%s%s%s", linebreak, indent, p.comment[s], linebreak)
ret = append(ret, l)
}
if p.alias[s] != "" {
s = fmt.Sprintf("%s%s%s%s%s", indent, p.alias[s], blank, s, linebreak)
} else {
s = fmt.Sprintf("%s%s%s", indent, s, linebreak)
}
ret = append(ret, s)
}
if len(p.list[pkgType]) > 0 {
ret = append(ret, linebreak)
}
}
if len(ret) > 0 && ret[len(ret)-1] == linebreak {
ret = ret[:len(ret)-1]
}
// remove duplicate empty lines
s1 := fmt.Sprintf("%s%s%s%s", linebreak, linebreak, linebreak, indent)
s2 := fmt.Sprintf("%s%s%s", linebreak, linebreak, indent)
return []byte(strings.ReplaceAll(strings.Join(ret, ""), s1, s2))
}
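// For illustration, assuming the local flag is set to the hypothetical prefix
// "github.com/you/yourproject", fmt arranges an import block roughly as:
//
//	import (
//		"fmt"
//		"strings"
//
//		"github.com/pkg/errors"
//
//		"github.com/you/yourproject/internal/config"
//	)
//
// i.e. standard library first, then third-party, then local packages, with
// entries sorted within each group and each non-empty group separated by a
// blank line.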
// getPkgInfo assumes line is an import path, and returns (path, alias, comment)
func getPkgInfo(line string, comment bool) (string, string, string) {
if comment {
s := strings.Split(line, commentFlag)
pkgArray := strings.Split(s[0], blank)
if len(pkgArray) > 1 {
return pkgArray[1], pkgArray[0], fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1]))
} else {
return strings.TrimSpace(pkgArray[0]), "", fmt.Sprintf("%s%s%s", commentFlag, blank, strings.TrimSpace(s[1]))
}
} else {
pkgArray := strings.Split(line, blank)
if len(pkgArray) > 1 {
return pkgArray[1], pkgArray[0], ""
} else {
return pkgArray[0], "", ""
}
}
}
func getPkgType(line string, localFlag []string) int {
pkgName := strings.Trim(line, "\"\\`")
for _, localPkg := range localFlag {
if strings.HasPrefix(pkgName, localPkg) {
return local
}
}
if isStandardPackage(pkgName) {
return standard
}
return remote
}
const (
blank = " "
indent = "\t"
linebreak = "\n"
)
func diff(b1, b2 []byte, filename string) (data []byte, err error) {
f1, err := writeTempFile("", "gci", b1)
if err != nil {
return
}
defer os.Remove(f1)
f2, err := writeTempFile("", "gci", b2)
if err != nil {
return
}
defer os.Remove(f2)
cmd := "diff"
data, err = exec.Command(cmd, "-u", f1, f2).CombinedOutput()
if len(data) > 0 {
// diff exits with a non-zero status when the files don't match.
// Ignore that failure as long as we get output.
return replaceTempFilename(data, filename)
}
return
}
func writeTempFile(dir, prefix string, data []byte) (string, error) {
file, err := ioutil.TempFile(dir, prefix)
if err != nil {
return "", err
}
_, err = file.Write(data)
if err1 := file.Close(); err == nil {
err = err1
}
if err != nil {
os.Remove(file.Name())
return "", err
}
return file.Name(), nil
}
// replaceTempFilename replaces temporary filenames in diff with actual one.
//
// --- /tmp/gofmt316145376 2017-02-03 19:13:00.280468375 -0500
// +++ /tmp/gofmt617882815 2017-02-03 19:13:00.280468375 -0500
// ...
// ->
// --- path/to/file.go.orig 2017-02-03 19:13:00.280468375 -0500
// +++ path/to/file.go 2017-02-03 19:13:00.280468375 -0500
// ...
func replaceTempFilename(diff []byte, filename string) ([]byte, error) {
bs := bytes.SplitN(diff, []byte{'\n'}, 3)
if len(bs) < 3 {
return nil, fmt.Errorf("got unexpected diff for %s", filename)
}
// Preserve timestamps.
var t0, t1 []byte
if i := bytes.LastIndexByte(bs[0], '\t'); i != -1 {
t0 = bs[0][i:]
}
if i := bytes.LastIndexByte(bs[1], '\t'); i != -1 {
t1 = bs[1][i:]
}
// Always print filepath with slash separator.
f := filepath.ToSlash(filename)
bs[0] = []byte(fmt.Sprintf("--- %s%s", f+".orig", t0))
bs[1] = []byte(fmt.Sprintf("+++ %s%s", f, t1))
return bytes.Join(bs, []byte{'\n'}), nil
}
func visitFile(set *FlagSet) filepath.WalkFunc {
return func(path string, f os.FileInfo, err error) error {
if err == nil && isGoFile(f) {
err = processFile(path, os.Stdout, set)
}
return err
}
}
func WalkDir(path string, set *FlagSet) error {
return filepath.Walk(path, visitFile(set))
}
func isGoFile(f os.FileInfo) bool {
// ignore non-Go files
name := f.Name()
return !f.IsDir() && !strings.HasPrefix(name, ".") && strings.HasSuffix(name, ".go")
}
func ProcessFile(filename string, out io.Writer, set *FlagSet) error {
return processFile(filename, out, set)
}
func processFile(filename string, out io.Writer, set *FlagSet) error {
var err error
f, err := os.Open(filename)
if err != nil {
return err
}
defer f.Close()
src, err := ioutil.ReadAll(f)
if err != nil {
return err
}
ori := make([]byte, len(src))
copy(ori, src)
start := bytes.Index(src, importStartFlag)
// in case there is no importStartFlag, or the importStartFlag only exists inside a comment
if start < 0 {
fmt.Printf("skip file %s since no import\n", filename)
return nil
}
end := bytes.Index(src[start:], importEndFlag) + start
ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak))
p := newPkg(ret, set.LocalFlag)
res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...)
if !bytes.Equal(ori, res) {
if *set.DoWrite {
// On Windows, we need to re-set the permissions from the file. See golang/go#38225.
var perms os.FileMode
if fi, err := os.Stat(filename); err == nil {
perms = fi.Mode() & os.ModePerm
}
err = ioutil.WriteFile(filename, res, perms)
if err != nil {
return err
}
}
if *set.DoDiff {
data, err := diff(ori, res, filename)
if err != nil {
return fmt.Errorf("failed to diff: %v", err)
}
fmt.Printf("diff -u %s %s\n", filepath.ToSlash(filename+".orig"), filepath.ToSlash(filename))
if _, err := out.Write(data); err != nil {
return fmt.Errorf("failed to write: %v", err)
}
}
}
if !*set.DoWrite && !*set.DoDiff {
if _, err = out.Write(res); err != nil {
return fmt.Errorf("failed to write: %v", err)
}
}
return err
}
// Run returns the original source and the formatted result as []byte if it succeeds
func Run(filename string, set *FlagSet) ([]byte, []byte, error) {
var err error
f, err := os.Open(filename)
if err != nil {
return nil, nil, err
}
defer f.Close()
src, err := ioutil.ReadAll(f)
if err != nil {
return nil, nil, err
}
ori := make([]byte, len(src))
copy(ori, src)
start := bytes.Index(src, importStartFlag)
// in case there is no importStartFlag, or the importStartFlag only exists inside a comment
if start < 0 {
return nil, nil, nil
}
end := bytes.Index(src[start:], importEndFlag) + start
// in case import flags are part of a codegen template, or otherwise "wrong"
if start+len(importStartFlag) > end {
return nil, nil, nil
}
ret := bytes.Split(src[start+len(importStartFlag):end], []byte(linebreak))
p := newPkg(ret, set.LocalFlag)
res := append(src[:start+len(importStartFlag)], append(p.fmt(), src[end+1:]...)...)
if bytes.Equal(ori, res) {
return ori, nil, nil
}
return ori, res, nil
}

161
vendor/github.com/daixiang0/gci/pkg/gci/std.go generated vendored Normal file
View file

@ -0,0 +1,161 @@
package gci
// Code generated based on go1.16beta1. DO NOT EDIT.
var standardPackages = map[string]struct{}{
"archive/tar": {},
"archive/zip": {},
"bufio": {},
"bytes": {},
"compress/bzip2": {},
"compress/flate": {},
"compress/gzip": {},
"compress/lzw": {},
"compress/zlib": {},
"container/heap": {},
"container/list": {},
"container/ring": {},
"context": {},
"crypto": {},
"crypto/aes": {},
"crypto/cipher": {},
"crypto/des": {},
"crypto/dsa": {},
"crypto/ecdsa": {},
"crypto/ed25519": {},
"crypto/elliptic": {},
"crypto/hmac": {},
"crypto/md5": {},
"crypto/rand": {},
"crypto/rc4": {},
"crypto/rsa": {},
"crypto/sha1": {},
"crypto/sha256": {},
"crypto/sha512": {},
"crypto/subtle": {},
"crypto/tls": {},
"crypto/x509": {},
"crypto/x509/pkix": {},
"database/sql": {},
"database/sql/driver": {},
"debug/dwarf": {},
"debug/elf": {},
"debug/gosym": {},
"debug/macho": {},
"debug/pe": {},
"debug/plan9obj": {},
"embed": {},
"encoding": {},
"encoding/ascii85": {},
"encoding/asn1": {},
"encoding/base32": {},
"encoding/base64": {},
"encoding/binary": {},
"encoding/csv": {},
"encoding/gob": {},
"encoding/hex": {},
"encoding/json": {},
"encoding/pem": {},
"encoding/xml": {},
"errors": {},
"expvar": {},
"flag": {},
"fmt": {},
"go/ast": {},
"go/build": {},
"go/constant": {},
"go/doc": {},
"go/format": {},
"go/importer": {},
"go/parser": {},
"go/printer": {},
"go/scanner": {},
"go/token": {},
"go/types": {},
"hash": {},
"hash/adler32": {},
"hash/crc32": {},
"hash/crc64": {},
"hash/fnv": {},
"hash/maphash": {},
"html": {},
"html/template": {},
"image": {},
"image/color": {},
"image/color/palette": {},
"image/draw": {},
"image/gif": {},
"image/jpeg": {},
"image/png": {},
"index/suffixarray": {},
"io": {},
"io/fs": {},
"io/ioutil": {},
"log": {},
"log/syslog": {},
"math": {},
"math/big": {},
"math/bits": {},
"math/cmplx": {},
"math/rand": {},
"mime": {},
"mime/multipart": {},
"mime/quotedprintable": {},
"net": {},
"net/http": {},
"net/http/cgi": {},
"net/http/cookiejar": {},
"net/http/fcgi": {},
"net/http/httptest": {},
"net/http/httptrace": {},
"net/http/httputil": {},
"net/http/pprof": {},
"net/mail": {},
"net/rpc": {},
"net/rpc/jsonrpc": {},
"net/smtp": {},
"net/textproto": {},
"net/url": {},
"os": {},
"os/exec": {},
"os/signal": {},
"os/user": {},
"path": {},
"path/filepath": {},
"plugin": {},
"reflect": {},
"regexp": {},
"regexp/syntax": {},
"runtime": {},
"runtime/cgo": {},
"runtime/debug": {},
"runtime/metrics": {},
"runtime/pprof": {},
"runtime/race": {},
"runtime/trace": {},
"sort": {},
"strconv": {},
"strings": {},
"sync": {},
"sync/atomic": {},
"syscall": {},
"testing": {},
"testing/fstest": {},
"testing/iotest": {},
"testing/quick": {},
"text/scanner": {},
"text/tabwriter": {},
"text/template": {},
"text/template/parse": {},
"time": {},
"time/tzdata": {},
"unicode": {},
"unicode/utf16": {},
"unicode/utf8": {},
"unsafe": {},
}
func isStandardPackage(pkg string) bool {
_, ok := standardPackages[pkg]
return ok
}

View file

@ -0,0 +1 @@
.idea/

View file

@ -0,0 +1,19 @@
values:
regexp:
copyright-holder: Copyright \(c\) {{year-range}} Denis Tingajkin
template: |
{{copyright-holder}}
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
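The template above drives the header check. Assuming the usual placeholder expansion (`{{copyright-holder}}` is filled from the regexp value above and `{{year-range}}` matches a span of years such as `2019-2021`), a Go file header that this configuration is meant to accept would look roughly like the following sketch:
```go
// Copyright (c) 2019-2021 Denis Tingajkin
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
```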

674
vendor/github.com/denis-tingajkin/go-header/LICENSE generated vendored Normal file
View file

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

81
vendor/github.com/denis-tingajkin/go-header/README.md generated vendored Normal file
View file

@ -0,0 +1,81 @@
# go-header
[![Actions Status](https://github.com/denis-tingajkin/go-header/workflows/ci/badge.svg)](https://github.com/denis-tingajkin/go-header/actions)
Go source code linter providing checks for license headers.
## Installation
For installation you can simply use `go get`.
```bash
go get github.com/denis-tingajkin/go-header/cmd/go-header
```
## Configuration
To configure the `.go-header.yml` linter you simply need to fill in the following fields:
```yaml
---
template: # expects the header template string.
template-path: # expects the path to a file containing the license header string.
values: # expects a `const` or `regexp` node whose value is a map of string to string.
const:
key1: value1 # const value just checks equality. Note `key1` should be used in template string as {{ key1 }} or {{ KEY1 }}.
regexp:
key2: value2 # regexp value just checks regex match. The value should be a valid regexp pattern. Note `key2` should be used in template string as {{ key2 }} or {{ KEY2 }}.
```
`values` can also be used recursively. Example:
```yaml
values:
const:
key1: "value"
regexp:
key2: "{{key1}} value1" # Reads as regex pattern "value value1"
```
## Built-in values
- **YEAR** - Expects the current year. Example header value: `2020`. Example of template usage: `{{YEAR}}` or `{{year}}`.
- **YEAR-RANGE** - Expects any valid year interval or the current year. Example header value: `2020` or `2000-2020`. Example of template usage: `{{year-range}}` or `{{YEAR-RANGE}}`.
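A minimal template sketch relying only on these built-in values might look like this (the copyright holder is a hypothetical placeholder):
```yaml
---
template: |
  Copyright (c) {{ YEAR-RANGE }} Example Corp.
  SPDX-License-Identifier: Apache-2.0
```
Such a template accepts headers like `Copyright (c) 2020 Example Corp.` as well as `Copyright (c) 2000-2020 Example Corp.`.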
## Execution
The `go-header` linter expects file paths as input. If you want to run `go-header` only on files changed in your diff, you can use this command:
```bash
go-header $(git diff --name-only | grep -E '.*\.go')
```
## Setup example
### Step 1
Create a configuration file `.go-header.yml` in the root of the project.
```yaml
---
values:
const:
MY COMPANY: mycompany.com
template: |
{{ MY COMPANY }}
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at:
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
### Step 2
You are ready! Execute `go-header ${PATH_TO_FILES}` from the root of the project.
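For illustration, a Go source file that satisfies this template would start with a header comment along the following lines (a sketch; the package name is a placeholder):
```go
// mycompany.com
// SPDX-License-Identifier: Apache-2.0
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package mypackage
```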

146
vendor/github.com/denis-tingajkin/go-header/analyzer.go generated vendored Normal file
View file

@ -0,0 +1,146 @@
// Copyright (c) 2020 Denis Tingajkin
//
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package goheader
import (
"fmt"
"go/ast"
"os"
"os/exec"
"strings"
"time"
)
type Target struct {
Path string
File *ast.File
}
const iso = "2006-01-02 15:04:05 -0700"
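// ModTime returns the last modification time of the target file: if the file
// has no uncommitted changes, the date of its last git commit is used;
// otherwise it falls back to the filesystem modification time.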
func (t *Target) ModTime() (time.Time, error) {
diff, err := exec.Command("git", "diff", t.Path).CombinedOutput()
if err == nil && len(diff) == 0 {
line, err := exec.Command("git", "log", "-1", "--pretty=format:%cd", "--date=iso", "--", t.Path).CombinedOutput()
if err == nil {
return time.Parse(iso, string(line))
}
}
info, err := os.Stat(t.Path)
if err != nil {
return time.Time{}, err
}
return info.ModTime(), nil
}
type Analyzer struct {
values map[string]Value
template string
}
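// Analyze checks the header comment of the target file against the configured
// template, resolving {{ field }} values as it goes. Files that were not
// modified in the current year are skipped. It returns an Issue describing the
// first mismatch, or nil if the header matches.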
func (a *Analyzer) Analyze(target *Target) Issue {
if a.template == "" {
return NewIssue("Missed template for check")
}
if t, err := target.ModTime(); err == nil {
if t.Year() != time.Now().Year() {
return nil
}
}
file := target.File
var header string
var offset = Location{
Position: 1,
}
if len(file.Comments) > 0 && file.Comments[0].Pos() < file.Package {
if strings.HasPrefix(file.Comments[0].List[0].Text, "/*") {
header = (&ast.CommentGroup{List: []*ast.Comment{file.Comments[0].List[0]}}).Text()
} else {
header = file.Comments[0].Text()
offset.Position += 3
}
}
header = strings.TrimSpace(header)
if header == "" {
return NewIssue("Missed header for check")
}
s := NewReader(header)
s.SetOffset(offset)
t := NewReader(a.template)
for !s.Done() && !t.Done() {
templateCh := t.Peek()
if templateCh == '{' {
name := a.readField(t)
if a.values[name] == nil {
return NewIssue(fmt.Sprintf("Template has unknown value: %v", name))
}
if i := a.values[name].Read(s); i != nil {
return i
}
continue
}
sourceCh := s.Peek()
if sourceCh != templateCh {
l := s.Location()
notNextLine := func(r rune) bool {
return r != '\n'
}
actual := s.ReadWhile(notNextLine)
expected := t.ReadWhile(notNextLine)
return NewIssueWithLocation(fmt.Sprintf("Actual: %v\nExpected:%v", actual, expected), l)
}
s.Next()
t.Next()
}
if !s.Done() {
l := s.Location()
return NewIssueWithLocation(fmt.Sprintf("Unexpected string: %v", s.Finish()), l)
}
if !t.Done() {
l := s.Location()
return NewIssueWithLocation(fmt.Sprintf("Missed string: %v", t.Finish()), l)
}
return nil
}
func (a *Analyzer) readField(reader *Reader) string {
_ = reader.Next()
_ = reader.Next()
r := reader.ReadWhile(func(r rune) bool {
return r != '}'
})
_ = reader.Next()
_ = reader.Next()
return strings.ToLower(strings.TrimSpace(r))
}
func New(options ...Option) *Analyzer {
a := &Analyzer{}
for _, o := range options {
o.apply(a)
}
for _, v := range a.values {
err := v.Calculate(a.values)
if err != nil {
panic(err.Error())
}
}
return a
}

99
vendor/github.com/denis-tingajkin/go-header/config.go generated vendored Normal file
View file

@ -0,0 +1,99 @@
// Copyright (c) 2020 Denis Tingajkin
//
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package goheader
import (
"errors"
"fmt"
"io/ioutil"
"strings"
"time"
"gopkg.in/yaml.v2"
)
// Configuration represents go-header linter setup parameters
type Configuration struct {
// Values is a map of values. Two types are supported: `const` and `regexp`. Values can be used recursively.
Values map[string]map[string]string `yaml:"values"`
// Template is the template used for checking. Uses values.
Template string `yaml:"template"`
// TemplatePath is the path to the template file. Useful if you need to load the template from a specific file.
TemplatePath string `yaml:"template-path"`
}
func (c *Configuration) builtInValues() map[string]Value {
var result = make(map[string]Value)
year := fmt.Sprint(time.Now().Year())
result["year-range"] = &RegexpValue{
RawValue: strings.ReplaceAll(`(20\d\d\-YEAR)|(YEAR)`, "YEAR", year),
}
result["year"] = &ConstValue{
RawValue: year,
}
return result
}
func (c *Configuration) GetValues() (map[string]Value, error) {
var result = c.builtInValues()
createConst := func(raw string) Value {
return &ConstValue{RawValue: raw}
}
createRegexp := func(raw string) Value {
return &RegexpValue{RawValue: raw}
}
appendValues := func(m map[string]string, create func(string) Value) {
for k, v := range m {
key := strings.ToLower(k)
result[key] = create(v)
}
}
for k, v := range c.Values {
switch k {
case "const":
appendValues(v, createConst)
case "regexp":
appendValues(v, createRegexp)
default:
return nil, fmt.Errorf("unknown value type %v", k)
}
}
return result, nil
}
func (c *Configuration) GetTemplate() (string, error) {
if c.Template != "" {
return c.Template, nil
}
if c.TemplatePath == "" {
return "", errors.New("template has not passed")
}
if b, err := ioutil.ReadFile(c.TemplatePath); err != nil {
return "", err
} else {
c.Template = strings.TrimSpace(string(b))
return c.Template, nil
}
}
func (c *Configuration) Parse(p string) error {
b, err := ioutil.ReadFile(p)
if err != nil {
return err
}
return yaml.Unmarshal(b, c)
}

10
vendor/github.com/denis-tingajkin/go-header/go.mod generated vendored Normal file
View file

@ -0,0 +1,10 @@
module github.com/denis-tingajkin/go-header
go 1.15
require (
github.com/fatih/color v1.9.0
github.com/sirupsen/logrus v1.6.0
github.com/stretchr/testify v1.5.1
gopkg.in/yaml.v2 v2.2.2
)

31
vendor/github.com/denis-tingajkin/go-header/go.sum generated vendored Normal file
View file

@ -0,0 +1,31 @@
github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/fatih/color v1.9.0 h1:8xPHl4/q1VyqGIPif1F+1V3Y3lSmrq01EabUW3CoW5s=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/konsorten/go-windows-terminal-sequences v1.0.3 h1:CE8S1cTafDpPvMhIxNJKvHsGVBgn1xWYf1NbHQhywc8=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/mattn/go-colorable v0.1.4 h1:snbPLB8fVfU9iwbbo30TPtbLRzwWu6aJS6Xh4eaaviA=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
github.com/mattn/go-isatty v0.0.11 h1:FxPOTFNqGkuDUGi3H/qkUbQO4ZiBa2brKq5r0l8TGeM=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/sirupsen/logrus v1.6.0 h1:UBcNElsrwanuuMsnGSlYmtmgbb23qDR5dG+6X6Oo89I=
github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894 h1:Cz4ceDQGXuKRnVBDTS23GTn/pU5OE2C0WrNTOYK1Uuc=
golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037 h1:YyJpGZS1sBuBCzLAR1VEpK193GlqGZbnPFnPV/5Rsb4=
golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

48
vendor/github.com/denis-tingajkin/go-header/issue.go generated vendored Normal file
View file

@ -0,0 +1,48 @@
// Copyright (c) 2020 Denis Tingajkin
//
// SPDX-License-Identifier: Apache-2.0
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at:
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package goheader
type Issue interface {
Location() Location
Message() string
}
type issue struct {
msg string
location Location
}
func (i *issue) Location() Location {
return i.location
}
func (i *issue) Message() string {
return i.msg
}
func NewIssueWithLocation(msg string, location Location) Issue {
return &issue{
msg: msg,
location: location,
}
}
func NewIssue(msg string) Issue {
return &issue{
msg: msg,
}
}

Some files were not shown because too many files have changed in this diff Show more