update golang.org/x/text v0.3.7 -> v0.3.8
Addresses CVE-2022-32149: a denial-of-service issue in golang.org/x/text/language, where ParseAcceptLanguage could take a very long time to parse a crafted Accept-Language header. The fix ships in x/text v0.3.8.
parent 8acc1b6875
commit 5a267a5ea8
161 changed files with 6582 additions and 3485 deletions
go.mod | 9

@@ -37,7 +37,7 @@ require (
 	golang.org/x/crypto v0.0.0-20220824171710-5757bc0c5503
 	golang.org/x/net v0.0.0-20220909164309-bea034e7d591
 	golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b
-	golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f
+	golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4
 	google.golang.org/grpc v1.47.0
 	google.golang.org/protobuf v1.28.0
 	gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b
@@ -208,12 +208,11 @@ require (
 	github.com/yagipy/maintidx v1.0.0 // indirect
 	github.com/yeya24/promlinter v0.1.1-0.20210918184747-d757024714a1 // indirect
 	gitlab.com/bosi/decorder v0.2.1 // indirect
-	golang.org/x/mod v0.5.1 // indirect
+	golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 // indirect
 	golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10 // indirect
-	golang.org/x/text v0.3.7 // indirect
+	golang.org/x/text v0.3.8 // indirect
 	golang.org/x/time v0.0.0-20220210224613-90d013bbcef8 // indirect
-	golang.org/x/tools v0.1.9 // indirect
-	golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
+	golang.org/x/tools v0.1.12 // indirect
 	google.golang.org/appengine v1.6.7 // indirect
 	google.golang.org/genproto v0.0.0-20220616135557-88e70c0c3a90 // indirect
 	gopkg.in/ini.v1 v1.66.2 // indirect
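For reviewers who want to confirm that the bumped golang.org/x/text version is what actually ends up linked into a built binary, here is a minimal sketch using the standard runtime/debug API; the program is illustrative and not part of this change:

package main

import (
	"fmt"
	"runtime/debug"
)

func main() {
	info, ok := debug.ReadBuildInfo()
	if !ok {
		fmt.Println("no module build info available")
		return
	}
	for _, dep := range info.Deps {
		// After this bump the resolved version should report v0.3.8 (or newer).
		if dep.Path == "golang.org/x/text" {
			fmt.Printf("%s %s\n", dep.Path, dep.Version)
		}
	}
}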
go.sum | 13

@@ -1217,8 +1217,9 @@ golang.org/x/mod v0.4.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.1/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
 golang.org/x/mod v0.5.0/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
-golang.org/x/mod v0.5.1 h1:OJxoQ/rynoF0dcCdI7cLPktw/hR2cueqYfjm43oqK38=
 golang.org/x/mod v0.5.1/go.mod h1:5OXOZSfqPIIbmVBIIKWRFfZjPR0E5r58TLhUjH0a2Ro=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4 h1:6zppjxzCulZykYSLyVDYbneBfbaBIQPYMevg0bEwv2s=
+golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
 golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
 golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -1308,8 +1309,8 @@ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJ
 golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f h1:Ax0t5p6N38Ga0dThY21weqDEyz2oklo4IvDkpigvkD8=
-golang.org/x/sync v0.0.0-20220601150217-0de741cfad7f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4 h1:uVc8UZUe6tr40fFVnUP5Oj+veunVezqYl9z7DYw9xzw=
+golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -1419,8 +1420,9 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
 golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
-golang.org/x/text v0.3.7 h1:olpwvP2KacW1ZWvsR7uQhoyTYvKAupfQrRGBFM352Gk=
 golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
+golang.org/x/text v0.3.8 h1:nAL+RVCQ9uMn3vJZbV+MRnydTJFPf8qqY42YiA6MrqY=
+golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
 golang.org/x/time v0.0.0-20180412165947-fbb02b2291d2/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
 golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -1532,8 +1534,9 @@ golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
 golang.org/x/tools v0.1.7/go.mod h1:LGqMHiF4EqQNHR1JncWGqT5BVaXmza+X+BDGol+dOxo=
 golang.org/x/tools v0.1.8/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
 golang.org/x/tools v0.1.9-0.20211228192929-ee1ca4ffc4da/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
-golang.org/x/tools v0.1.9 h1:j9KsMiaP1c3B0OTQGth0/k+miLGTgLsAFUCrF2vLcF8=
 golang.org/x/tools v0.1.9/go.mod h1:nABZi5QlRsZVlzPpHl034qft6wpY4eDcsTt5AaioBiU=
+golang.org/x/tools v0.1.12 h1:VveCTK38A2rkS8ZqFY25HIDFscX5X9OoEhJd3quQmXU=
+golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
 golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
 golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
vendor/golang.org/x/mod/modfile/read.go | 1 (generated, vendored)
|
@ -285,7 +285,6 @@ func (x *Line) Span() (start, end Position) {
|
|||
// "x"
|
||||
// "y"
|
||||
// )
|
||||
//
|
||||
type LineBlock struct {
|
||||
Comments
|
||||
Start Position
|
||||
|
|
vendor/golang.org/x/mod/modfile/rule.go | 250 (generated, vendored)
|
@ -423,68 +423,12 @@ func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, a
|
|||
}
|
||||
|
||||
case "replace":
|
||||
arrow := 2
|
||||
if len(args) >= 2 && args[1] == "=>" {
|
||||
arrow = 1
|
||||
}
|
||||
if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" {
|
||||
errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb)
|
||||
replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix)
|
||||
if wrappederr != nil {
|
||||
*errs = append(*errs, *wrappederr)
|
||||
return
|
||||
}
|
||||
s, err := parseString(&args[0])
|
||||
if err != nil {
|
||||
errorf("invalid quoted string: %v", err)
|
||||
return
|
||||
}
|
||||
pathMajor, err := modulePathMajor(s)
|
||||
if err != nil {
|
||||
wrapModPathError(s, err)
|
||||
return
|
||||
}
|
||||
var v string
|
||||
if arrow == 2 {
|
||||
v, err = parseVersion(verb, s, &args[1], fix)
|
||||
if err != nil {
|
||||
wrapError(err)
|
||||
return
|
||||
}
|
||||
if err := module.CheckPathMajor(v, pathMajor); err != nil {
|
||||
wrapModPathError(s, err)
|
||||
return
|
||||
}
|
||||
}
|
||||
ns, err := parseString(&args[arrow+1])
|
||||
if err != nil {
|
||||
errorf("invalid quoted string: %v", err)
|
||||
return
|
||||
}
|
||||
nv := ""
|
||||
if len(args) == arrow+2 {
|
||||
if !IsDirectoryPath(ns) {
|
||||
errorf("replacement module without version must be directory path (rooted or starting with ./ or ../)")
|
||||
return
|
||||
}
|
||||
if filepath.Separator == '/' && strings.Contains(ns, `\`) {
|
||||
errorf("replacement directory appears to be Windows path (on a non-windows system)")
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(args) == arrow+3 {
|
||||
nv, err = parseVersion(verb, ns, &args[arrow+2], fix)
|
||||
if err != nil {
|
||||
wrapError(err)
|
||||
return
|
||||
}
|
||||
if IsDirectoryPath(ns) {
|
||||
errorf("replacement module directory path %q cannot have version", ns)
|
||||
return
|
||||
}
|
||||
}
|
||||
f.Replace = append(f.Replace, &Replace{
|
||||
Old: module.Version{Path: s, Version: v},
|
||||
New: module.Version{Path: ns, Version: nv},
|
||||
Syntax: line,
|
||||
})
|
||||
f.Replace = append(f.Replace, replace)
|
||||
|
||||
case "retract":
|
||||
rationale := parseDirectiveComment(block, line)
|
||||
|
@ -515,6 +459,83 @@ func (f *File) add(errs *ErrorList, block *LineBlock, line *Line, verb string, a
|
|||
}
|
||||
}
|
||||
|
||||
func parseReplace(filename string, line *Line, verb string, args []string, fix VersionFixer) (*Replace, *Error) {
|
||||
wrapModPathError := func(modPath string, err error) *Error {
|
||||
return &Error{
|
||||
Filename: filename,
|
||||
Pos: line.Start,
|
||||
ModPath: modPath,
|
||||
Verb: verb,
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
wrapError := func(err error) *Error {
|
||||
return &Error{
|
||||
Filename: filename,
|
||||
Pos: line.Start,
|
||||
Err: err,
|
||||
}
|
||||
}
|
||||
errorf := func(format string, args ...interface{}) *Error {
|
||||
return wrapError(fmt.Errorf(format, args...))
|
||||
}
|
||||
|
||||
arrow := 2
|
||||
if len(args) >= 2 && args[1] == "=>" {
|
||||
arrow = 1
|
||||
}
|
||||
if len(args) < arrow+2 || len(args) > arrow+3 || args[arrow] != "=>" {
|
||||
return nil, errorf("usage: %s module/path [v1.2.3] => other/module v1.4\n\t or %s module/path [v1.2.3] => ../local/directory", verb, verb)
|
||||
}
|
||||
s, err := parseString(&args[0])
|
||||
if err != nil {
|
||||
return nil, errorf("invalid quoted string: %v", err)
|
||||
}
|
||||
pathMajor, err := modulePathMajor(s)
|
||||
if err != nil {
|
||||
return nil, wrapModPathError(s, err)
|
||||
|
||||
}
|
||||
var v string
|
||||
if arrow == 2 {
|
||||
v, err = parseVersion(verb, s, &args[1], fix)
|
||||
if err != nil {
|
||||
return nil, wrapError(err)
|
||||
}
|
||||
if err := module.CheckPathMajor(v, pathMajor); err != nil {
|
||||
return nil, wrapModPathError(s, err)
|
||||
}
|
||||
}
|
||||
ns, err := parseString(&args[arrow+1])
|
||||
if err != nil {
|
||||
return nil, errorf("invalid quoted string: %v", err)
|
||||
}
|
||||
nv := ""
|
||||
if len(args) == arrow+2 {
|
||||
if !IsDirectoryPath(ns) {
|
||||
return nil, errorf("replacement module without version must be directory path (rooted or starting with ./ or ../)")
|
||||
}
|
||||
if filepath.Separator == '/' && strings.Contains(ns, `\`) {
|
||||
return nil, errorf("replacement directory appears to be Windows path (on a non-windows system)")
|
||||
}
|
||||
}
|
||||
if len(args) == arrow+3 {
|
||||
nv, err = parseVersion(verb, ns, &args[arrow+2], fix)
|
||||
if err != nil {
|
||||
return nil, wrapError(err)
|
||||
}
|
||||
if IsDirectoryPath(ns) {
|
||||
return nil, errorf("replacement module directory path %q cannot have version", ns)
|
||||
|
||||
}
|
||||
}
|
||||
return &Replace{
|
||||
Old: module.Version{Path: s, Version: v},
|
||||
New: module.Version{Path: ns, Version: nv},
|
||||
Syntax: line,
|
||||
}, nil
|
||||
}
|
||||
|
||||
// fixRetract applies fix to each retract directive in f, appending any errors
|
||||
// to errs.
|
||||
//
|
||||
|
@ -556,6 +577,63 @@ func (f *File) fixRetract(fix VersionFixer, errs *ErrorList) {
|
|||
}
|
||||
}
|
||||
|
||||
func (f *WorkFile) add(errs *ErrorList, line *Line, verb string, args []string, fix VersionFixer) {
|
||||
wrapError := func(err error) {
|
||||
*errs = append(*errs, Error{
|
||||
Filename: f.Syntax.Name,
|
||||
Pos: line.Start,
|
||||
Err: err,
|
||||
})
|
||||
}
|
||||
errorf := func(format string, args ...interface{}) {
|
||||
wrapError(fmt.Errorf(format, args...))
|
||||
}
|
||||
|
||||
switch verb {
|
||||
default:
|
||||
errorf("unknown directive: %s", verb)
|
||||
|
||||
case "go":
|
||||
if f.Go != nil {
|
||||
errorf("repeated go statement")
|
||||
return
|
||||
}
|
||||
if len(args) != 1 {
|
||||
errorf("go directive expects exactly one argument")
|
||||
return
|
||||
} else if !GoVersionRE.MatchString(args[0]) {
|
||||
errorf("invalid go version '%s': must match format 1.23", args[0])
|
||||
return
|
||||
}
|
||||
|
||||
f.Go = &Go{Syntax: line}
|
||||
f.Go.Version = args[0]
|
||||
|
||||
case "use":
|
||||
if len(args) != 1 {
|
||||
errorf("usage: %s local/dir", verb)
|
||||
return
|
||||
}
|
||||
s, err := parseString(&args[0])
|
||||
if err != nil {
|
||||
errorf("invalid quoted string: %v", err)
|
||||
return
|
||||
}
|
||||
f.Use = append(f.Use, &Use{
|
||||
Path: s,
|
||||
Syntax: line,
|
||||
})
|
||||
|
||||
case "replace":
|
||||
replace, wrappederr := parseReplace(f.Syntax.Name, line, verb, args, fix)
|
||||
if wrappederr != nil {
|
||||
*errs = append(*errs, *wrappederr)
|
||||
return
|
||||
}
|
||||
f.Replace = append(f.Replace, replace)
|
||||
}
|
||||
}
|
||||
|
||||
// IsDirectoryPath reports whether the given path should be interpreted
|
||||
// as a directory path. Just like on the go command line, relative paths
|
||||
// and rooted paths are directory paths; the rest are module paths.
|
||||
|
@ -1212,6 +1290,10 @@ func (f *File) DropExclude(path, vers string) error {
|
|||
}
|
||||
|
||||
func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error {
|
||||
return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers)
|
||||
}
|
||||
|
||||
func addReplace(syntax *FileSyntax, replace *[]*Replace, oldPath, oldVers, newPath, newVers string) error {
|
||||
need := true
|
||||
old := module.Version{Path: oldPath, Version: oldVers}
|
||||
new := module.Version{Path: newPath, Version: newVers}
|
||||
|
@ -1225,12 +1307,12 @@ func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error {
|
|||
}
|
||||
|
||||
var hint *Line
|
||||
for _, r := range f.Replace {
|
||||
for _, r := range *replace {
|
||||
if r.Old.Path == oldPath && (oldVers == "" || r.Old.Version == oldVers) {
|
||||
if need {
|
||||
// Found replacement for old; update to use new.
|
||||
r.New = new
|
||||
f.Syntax.updateLine(r.Syntax, tokens...)
|
||||
syntax.updateLine(r.Syntax, tokens...)
|
||||
need = false
|
||||
continue
|
||||
}
|
||||
|
@ -1243,7 +1325,7 @@ func (f *File) AddReplace(oldPath, oldVers, newPath, newVers string) error {
|
|||
}
|
||||
}
|
||||
if need {
|
||||
f.Replace = append(f.Replace, &Replace{Old: old, New: new, Syntax: f.Syntax.addLine(hint, tokens...)})
|
||||
*replace = append(*replace, &Replace{Old: old, New: new, Syntax: syntax.addLine(hint, tokens...)})
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -1329,30 +1411,36 @@ func (f *File) SortBlocks() {
|
|||
// retract directives are not de-duplicated since comments are
|
||||
// meaningful, and versions may be retracted multiple times.
|
||||
func (f *File) removeDups() {
|
||||
removeDups(f.Syntax, &f.Exclude, &f.Replace)
|
||||
}
|
||||
|
||||
func removeDups(syntax *FileSyntax, exclude *[]*Exclude, replace *[]*Replace) {
|
||||
kill := make(map[*Line]bool)
|
||||
|
||||
// Remove duplicate excludes.
|
||||
haveExclude := make(map[module.Version]bool)
|
||||
for _, x := range f.Exclude {
|
||||
if haveExclude[x.Mod] {
|
||||
kill[x.Syntax] = true
|
||||
continue
|
||||
if exclude != nil {
|
||||
haveExclude := make(map[module.Version]bool)
|
||||
for _, x := range *exclude {
|
||||
if haveExclude[x.Mod] {
|
||||
kill[x.Syntax] = true
|
||||
continue
|
||||
}
|
||||
haveExclude[x.Mod] = true
|
||||
}
|
||||
haveExclude[x.Mod] = true
|
||||
}
|
||||
var excl []*Exclude
|
||||
for _, x := range f.Exclude {
|
||||
if !kill[x.Syntax] {
|
||||
excl = append(excl, x)
|
||||
var excl []*Exclude
|
||||
for _, x := range *exclude {
|
||||
if !kill[x.Syntax] {
|
||||
excl = append(excl, x)
|
||||
}
|
||||
}
|
||||
*exclude = excl
|
||||
}
|
||||
f.Exclude = excl
|
||||
|
||||
// Remove duplicate replacements.
|
||||
// Later replacements take priority over earlier ones.
|
||||
haveReplace := make(map[module.Version]bool)
|
||||
for i := len(f.Replace) - 1; i >= 0; i-- {
|
||||
x := f.Replace[i]
|
||||
for i := len(*replace) - 1; i >= 0; i-- {
|
||||
x := (*replace)[i]
|
||||
if haveReplace[x.Old] {
|
||||
kill[x.Syntax] = true
|
||||
continue
|
||||
|
@ -1360,18 +1448,18 @@ func (f *File) removeDups() {
|
|||
haveReplace[x.Old] = true
|
||||
}
|
||||
var repl []*Replace
|
||||
for _, x := range f.Replace {
|
||||
for _, x := range *replace {
|
||||
if !kill[x.Syntax] {
|
||||
repl = append(repl, x)
|
||||
}
|
||||
}
|
||||
f.Replace = repl
|
||||
*replace = repl
|
||||
|
||||
// Duplicate require and retract directives are not removed.
|
||||
|
||||
// Drop killed statements from the syntax tree.
|
||||
var stmts []Expr
|
||||
for _, stmt := range f.Syntax.Stmt {
|
||||
for _, stmt := range syntax.Stmt {
|
||||
switch stmt := stmt.(type) {
|
||||
case *Line:
|
||||
if kill[stmt] {
|
||||
|
@ -1391,7 +1479,7 @@ func (f *File) removeDups() {
|
|||
}
|
||||
stmts = append(stmts, stmt)
|
||||
}
|
||||
f.Syntax.Stmt = stmts
|
||||
syntax.Stmt = stmts
|
||||
}
|
||||
|
||||
// lineLess returns whether li should be sorted before lj. It sorts
|
||||
|
|
vendor/golang.org/x/mod/modfile/work.go | 234 (generated, vendored, new file)
|
@ -0,0 +1,234 @@
|
|||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package modfile
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A WorkFile is the parsed, interpreted form of a go.work file.
|
||||
type WorkFile struct {
|
||||
Go *Go
|
||||
Use []*Use
|
||||
Replace []*Replace
|
||||
|
||||
Syntax *FileSyntax
|
||||
}
|
||||
|
||||
// A Use is a single directory statement.
|
||||
type Use struct {
|
||||
Path string // Use path of module.
|
||||
ModulePath string // Module path in the comment.
|
||||
Syntax *Line
|
||||
}
|
||||
|
||||
// ParseWork parses and returns a go.work file.
|
||||
//
|
||||
// file is the name of the file, used in positions and errors.
|
||||
//
|
||||
// data is the content of the file.
|
||||
//
|
||||
// fix is an optional function that canonicalizes module versions.
|
||||
// If fix is nil, all module versions must be canonical (module.CanonicalVersion
|
||||
// must return the same string).
|
||||
func ParseWork(file string, data []byte, fix VersionFixer) (*WorkFile, error) {
|
||||
fs, err := parse(file, data)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
f := &WorkFile{
|
||||
Syntax: fs,
|
||||
}
|
||||
var errs ErrorList
|
||||
|
||||
for _, x := range fs.Stmt {
|
||||
switch x := x.(type) {
|
||||
case *Line:
|
||||
f.add(&errs, x, x.Token[0], x.Token[1:], fix)
|
||||
|
||||
case *LineBlock:
|
||||
if len(x.Token) > 1 {
|
||||
errs = append(errs, Error{
|
||||
Filename: file,
|
||||
Pos: x.Start,
|
||||
Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")),
|
||||
})
|
||||
continue
|
||||
}
|
||||
switch x.Token[0] {
|
||||
default:
|
||||
errs = append(errs, Error{
|
||||
Filename: file,
|
||||
Pos: x.Start,
|
||||
Err: fmt.Errorf("unknown block type: %s", strings.Join(x.Token, " ")),
|
||||
})
|
||||
continue
|
||||
case "use", "replace":
|
||||
for _, l := range x.Line {
|
||||
f.add(&errs, l, x.Token[0], l.Token, fix)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if len(errs) > 0 {
|
||||
return nil, errs
|
||||
}
|
||||
return f, nil
|
||||
}
|
||||
|
||||
// Cleanup cleans up the file f after any edit operations.
|
||||
// To avoid quadratic behavior, modifications like DropRequire
|
||||
// clear the entry but do not remove it from the slice.
|
||||
// Cleanup cleans out all the cleared entries.
|
||||
func (f *WorkFile) Cleanup() {
|
||||
w := 0
|
||||
for _, r := range f.Use {
|
||||
if r.Path != "" {
|
||||
f.Use[w] = r
|
||||
w++
|
||||
}
|
||||
}
|
||||
f.Use = f.Use[:w]
|
||||
|
||||
w = 0
|
||||
for _, r := range f.Replace {
|
||||
if r.Old.Path != "" {
|
||||
f.Replace[w] = r
|
||||
w++
|
||||
}
|
||||
}
|
||||
f.Replace = f.Replace[:w]
|
||||
|
||||
f.Syntax.Cleanup()
|
||||
}
|
||||
|
||||
func (f *WorkFile) AddGoStmt(version string) error {
|
||||
if !GoVersionRE.MatchString(version) {
|
||||
return fmt.Errorf("invalid language version string %q", version)
|
||||
}
|
||||
if f.Go == nil {
|
||||
stmt := &Line{Token: []string{"go", version}}
|
||||
f.Go = &Go{
|
||||
Version: version,
|
||||
Syntax: stmt,
|
||||
}
|
||||
// Find the first non-comment-only block that's and add
|
||||
// the go statement before it. That will keep file comments at the top.
|
||||
i := 0
|
||||
for i = 0; i < len(f.Syntax.Stmt); i++ {
|
||||
if _, ok := f.Syntax.Stmt[i].(*CommentBlock); !ok {
|
||||
break
|
||||
}
|
||||
}
|
||||
f.Syntax.Stmt = append(append(f.Syntax.Stmt[:i:i], stmt), f.Syntax.Stmt[i:]...)
|
||||
} else {
|
||||
f.Go.Version = version
|
||||
f.Syntax.updateLine(f.Go.Syntax, "go", version)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *WorkFile) AddUse(diskPath, modulePath string) error {
|
||||
need := true
|
||||
for _, d := range f.Use {
|
||||
if d.Path == diskPath {
|
||||
if need {
|
||||
d.ModulePath = modulePath
|
||||
f.Syntax.updateLine(d.Syntax, "use", AutoQuote(diskPath))
|
||||
need = false
|
||||
} else {
|
||||
d.Syntax.markRemoved()
|
||||
*d = Use{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if need {
|
||||
f.AddNewUse(diskPath, modulePath)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *WorkFile) AddNewUse(diskPath, modulePath string) {
|
||||
line := f.Syntax.addLine(nil, "use", AutoQuote(diskPath))
|
||||
f.Use = append(f.Use, &Use{Path: diskPath, ModulePath: modulePath, Syntax: line})
|
||||
}
|
||||
|
||||
func (f *WorkFile) SetUse(dirs []*Use) {
|
||||
need := make(map[string]string)
|
||||
for _, d := range dirs {
|
||||
need[d.Path] = d.ModulePath
|
||||
}
|
||||
|
||||
for _, d := range f.Use {
|
||||
if modulePath, ok := need[d.Path]; ok {
|
||||
d.ModulePath = modulePath
|
||||
} else {
|
||||
d.Syntax.markRemoved()
|
||||
*d = Use{}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO(#45713): Add module path to comment.
|
||||
|
||||
for diskPath, modulePath := range need {
|
||||
f.AddNewUse(diskPath, modulePath)
|
||||
}
|
||||
f.SortBlocks()
|
||||
}
|
||||
|
||||
func (f *WorkFile) DropUse(path string) error {
|
||||
for _, d := range f.Use {
|
||||
if d.Path == path {
|
||||
d.Syntax.markRemoved()
|
||||
*d = Use{}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *WorkFile) AddReplace(oldPath, oldVers, newPath, newVers string) error {
|
||||
return addReplace(f.Syntax, &f.Replace, oldPath, oldVers, newPath, newVers)
|
||||
}
|
||||
|
||||
func (f *WorkFile) DropReplace(oldPath, oldVers string) error {
|
||||
for _, r := range f.Replace {
|
||||
if r.Old.Path == oldPath && r.Old.Version == oldVers {
|
||||
r.Syntax.markRemoved()
|
||||
*r = Replace{}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *WorkFile) SortBlocks() {
|
||||
f.removeDups() // otherwise sorting is unsafe
|
||||
|
||||
for _, stmt := range f.Syntax.Stmt {
|
||||
block, ok := stmt.(*LineBlock)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
sort.SliceStable(block.Line, func(i, j int) bool {
|
||||
return lineLess(block.Line[i], block.Line[j])
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// removeDups removes duplicate replace directives.
|
||||
//
|
||||
// Later replace directives take priority.
|
||||
//
|
||||
// require directives are not de-duplicated. That's left up to higher-level
|
||||
// logic (MVS).
|
||||
//
|
||||
// retract directives are not de-duplicated since comments are
|
||||
// meaningful, and versions may be retracted multiple times.
|
||||
func (f *WorkFile) removeDups() {
|
||||
removeDups(f.Syntax, nil, &f.Replace)
|
||||
}
|
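The newly vendored work.go brings go.work support into x/mod's modfile package (WorkFile, ParseWork, the use directive, and a shared addReplace/removeDups path with go.mod). Below is a minimal sketch of driving that API, assuming a go.work file exists in the current directory; the paths and module names are placeholders:

package main

import (
	"fmt"
	"os"

	"golang.org/x/mod/modfile"
)

func main() {
	data, err := os.ReadFile("go.work")
	if err != nil {
		panic(err)
	}
	// nil VersionFixer: versions must already be canonical.
	wf, err := modfile.ParseWork("go.work", data, nil)
	if err != nil {
		panic(err)
	}
	// Add a use directive and a replace directive, then re-render the file.
	if err := wf.AddUse("./tools", ""); err != nil {
		panic(err)
	}
	if err := wf.AddReplace("example.com/old", "", "../local", ""); err != nil {
		panic(err)
	}
	wf.Cleanup()
	fmt.Print(string(modfile.Format(wf.Syntax)))
}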
vendor/golang.org/x/mod/module/module.go | 15 (generated, vendored)
|
@ -15,7 +15,7 @@
|
|||
// but additional checking functions, most notably Check, verify that
|
||||
// a particular path, version pair is valid.
|
||||
//
|
||||
// Escaped Paths
|
||||
// # Escaped Paths
|
||||
//
|
||||
// Module paths appear as substrings of file system paths
|
||||
// (in the download cache) and of web server URLs in the proxy protocol.
|
||||
|
@ -55,7 +55,7 @@
|
|||
// Import paths have never allowed exclamation marks, so there is no
|
||||
// need to define how to escape a literal !.
|
||||
//
|
||||
// Unicode Restrictions
|
||||
// # Unicode Restrictions
|
||||
//
|
||||
// Today, paths are disallowed from using Unicode.
|
||||
//
|
||||
|
@ -102,9 +102,9 @@ import (
|
|||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
"errors"
|
||||
|
||||
"golang.org/x/mod/semver"
|
||||
errors "golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// A Version (for clients, a module.Version) is defined by a module path and version pair.
|
||||
|
@ -286,12 +286,7 @@ func fileNameOK(r rune) bool {
|
|||
if '0' <= r && r <= '9' || 'A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' {
|
||||
return true
|
||||
}
|
||||
for i := 0; i < len(allowed); i++ {
|
||||
if rune(allowed[i]) == r {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
return strings.ContainsRune(allowed, r)
|
||||
}
|
||||
// It may be OK to add more ASCII punctuation here, but only carefully.
|
||||
// For example Windows disallows < > \, and macOS disallows :, so we must not allow those.
|
||||
|
@ -803,6 +798,7 @@ func unescapeString(escaped string) (string, bool) {
|
|||
// GOPRIVATE environment variable, as described by 'go help module-private'.
|
||||
//
|
||||
// It ignores any empty or malformed patterns in the list.
|
||||
// Trailing slashes on patterns are ignored.
|
||||
func MatchPrefixPatterns(globs, target string) bool {
|
||||
for globs != "" {
|
||||
// Extract next non-empty glob in comma-separated list.
|
||||
|
@ -812,6 +808,7 @@ func MatchPrefixPatterns(globs, target string) bool {
|
|||
} else {
|
||||
glob, globs = globs, ""
|
||||
}
|
||||
glob = strings.TrimSuffix(glob, "/")
|
||||
if glob == "" {
|
||||
continue
|
||||
}
|
||||
|
|
vendor/golang.org/x/mod/semver/semver.go | 10 (generated, vendored)
|
@ -32,7 +32,6 @@ type parsed struct {
|
|||
short string
|
||||
prerelease string
|
||||
build string
|
||||
err string
|
||||
}
|
||||
|
||||
// IsValid reports whether v is a valid semantic version string.
|
||||
|
@ -172,12 +171,10 @@ func Sort(list []string) {
|
|||
|
||||
func parse(v string) (p parsed, ok bool) {
|
||||
if v == "" || v[0] != 'v' {
|
||||
p.err = "missing v prefix"
|
||||
return
|
||||
}
|
||||
p.major, v, ok = parseInt(v[1:])
|
||||
if !ok {
|
||||
p.err = "bad major version"
|
||||
return
|
||||
}
|
||||
if v == "" {
|
||||
|
@ -187,13 +184,11 @@ func parse(v string) (p parsed, ok bool) {
|
|||
return
|
||||
}
|
||||
if v[0] != '.' {
|
||||
p.err = "bad minor prefix"
|
||||
ok = false
|
||||
return
|
||||
}
|
||||
p.minor, v, ok = parseInt(v[1:])
|
||||
if !ok {
|
||||
p.err = "bad minor version"
|
||||
return
|
||||
}
|
||||
if v == "" {
|
||||
|
@ -202,31 +197,26 @@ func parse(v string) (p parsed, ok bool) {
|
|||
return
|
||||
}
|
||||
if v[0] != '.' {
|
||||
p.err = "bad patch prefix"
|
||||
ok = false
|
||||
return
|
||||
}
|
||||
p.patch, v, ok = parseInt(v[1:])
|
||||
if !ok {
|
||||
p.err = "bad patch version"
|
||||
return
|
||||
}
|
||||
if len(v) > 0 && v[0] == '-' {
|
||||
p.prerelease, v, ok = parsePrerelease(v)
|
||||
if !ok {
|
||||
p.err = "bad prerelease"
|
||||
return
|
||||
}
|
||||
}
|
||||
if len(v) > 0 && v[0] == '+' {
|
||||
p.build, v, ok = parseBuild(v)
|
||||
if !ok {
|
||||
p.err = "bad build"
|
||||
return
|
||||
}
|
||||
}
|
||||
if v != "" {
|
||||
p.err = "junk on end"
|
||||
ok = false
|
||||
return
|
||||
}
|
||||
|
|
vendor/golang.org/x/sync/AUTHORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code refers to The Go Authors for copyright purposes.
|
||||
# The master list of authors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/AUTHORS.
|
vendor/golang.org/x/sync/CONTRIBUTORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code was written by the Go contributors.
|
||||
# The master list of contributors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/CONTRIBUTORS.
|
vendor/golang.org/x/text/AUTHORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code refers to The Go Authors for copyright purposes.
|
||||
# The master list of authors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/AUTHORS.
|
vendor/golang.org/x/text/CONTRIBUTORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code was written by the Go contributors.
|
||||
# The master list of contributors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/CONTRIBUTORS.
|
vendor/golang.org/x/text/internal/language/compact/tables.go | 6 (generated, vendored)
|
@ -966,7 +966,7 @@ var coreTags = []language.CompactCoreInfo{ // 773 elements
|
|||
0x3fd00000, 0x3fd00072, 0x3fd000da, 0x3fd0010c,
|
||||
0x3ff00000, 0x3ff000d1, 0x40100000, 0x401000c3,
|
||||
0x40200000, 0x4020004c, 0x40700000, 0x40800000,
|
||||
0x4085a000, 0x4085a0ba, 0x408e3000, 0x408e30ba,
|
||||
0x4085a000, 0x4085a0ba, 0x408e8000, 0x408e80ba,
|
||||
0x40c00000, 0x40c000b3, 0x41200000, 0x41200111,
|
||||
0x41600000, 0x4160010f, 0x41c00000, 0x41d00000,
|
||||
// Entry 280 - 29F
|
||||
|
@ -994,7 +994,7 @@ var coreTags = []language.CompactCoreInfo{ // 773 elements
|
|||
0x4ae00130, 0x4b400000, 0x4b400099, 0x4b4000e8,
|
||||
0x4bc00000, 0x4bc05000, 0x4bc05024, 0x4bc20000,
|
||||
0x4bc20137, 0x4bc5a000, 0x4bc5a137, 0x4be00000,
|
||||
0x4be5a000, 0x4be5a0b4, 0x4beeb000, 0x4beeb0b4,
|
||||
0x4be5a000, 0x4be5a0b4, 0x4bef1000, 0x4bef10b4,
|
||||
0x4c000000, 0x4c300000, 0x4c30013e, 0x4c900000,
|
||||
// Entry 2E0 - 2FF
|
||||
0x4c900001, 0x4cc00000, 0x4cc0012f, 0x4ce00000,
|
||||
|
@ -1012,4 +1012,4 @@ var coreTags = []language.CompactCoreInfo{ // 773 elements
|
|||
|
||||
const specialTagsStr string = "ca-ES-valencia en-US-u-va-posix"
|
||||
|
||||
// Total table size 3147 bytes (3KiB); checksum: BE816D44
|
||||
// Total table size 3147 bytes (3KiB); checksum: 6772C83C
|
||||
|
|
vendor/golang.org/x/text/internal/language/lookup.go | 2 (generated, vendored)

@@ -328,7 +328,7 @@ func (r Region) IsPrivateUse() bool {
 	return r.typ()&iso3166UserAssigned != 0
 }
 
-type Script uint8
+type Script uint16
 
 // getScriptID returns the script id for string s. It assumes that s
 // is of the format [A-Z][a-z]{3}.
vendor/golang.org/x/text/internal/language/parse.go | 24 (generated, vendored)
|
@ -270,7 +270,7 @@ func parse(scan *scanner, s string) (t Tag, err error) {
|
|||
} else if n >= 4 {
|
||||
return Und, ErrSyntax
|
||||
} else { // the usual case
|
||||
t, end = parseTag(scan)
|
||||
t, end = parseTag(scan, true)
|
||||
if n := len(scan.token); n == 1 {
|
||||
t.pExt = uint16(end)
|
||||
end = parseExtensions(scan)
|
||||
|
@ -296,7 +296,8 @@ func parse(scan *scanner, s string) (t Tag, err error) {
|
|||
|
||||
// parseTag parses language, script, region and variants.
|
||||
// It returns a Tag and the end position in the input that was parsed.
|
||||
func parseTag(scan *scanner) (t Tag, end int) {
|
||||
// If doNorm is true, then <lang>-<extlang> will be normalized to <extlang>.
|
||||
func parseTag(scan *scanner, doNorm bool) (t Tag, end int) {
|
||||
var e error
|
||||
// TODO: set an error if an unknown lang, script or region is encountered.
|
||||
t.LangID, e = getLangID(scan.token)
|
||||
|
@ -307,14 +308,17 @@ func parseTag(scan *scanner) (t Tag, end int) {
|
|||
for len(scan.token) == 3 && isAlpha(scan.token[0]) {
|
||||
// From http://tools.ietf.org/html/bcp47, <lang>-<extlang> tags are equivalent
|
||||
// to a tag of the form <extlang>.
|
||||
lang, e := getLangID(scan.token)
|
||||
if lang != 0 {
|
||||
t.LangID = lang
|
||||
copy(scan.b[langStart:], lang.String())
|
||||
scan.b[langStart+3] = '-'
|
||||
scan.start = langStart + 4
|
||||
if doNorm {
|
||||
lang, e := getLangID(scan.token)
|
||||
if lang != 0 {
|
||||
t.LangID = lang
|
||||
langStr := lang.String()
|
||||
copy(scan.b[langStart:], langStr)
|
||||
scan.b[langStart+len(langStr)] = '-'
|
||||
scan.start = langStart + len(langStr) + 1
|
||||
}
|
||||
scan.gobble(e)
|
||||
}
|
||||
scan.gobble(e)
|
||||
end = scan.scan()
|
||||
}
|
||||
if len(scan.token) == 4 && isAlpha(scan.token[0]) {
|
||||
|
@ -559,7 +563,7 @@ func parseExtension(scan *scanner) int {
|
|||
case 't': // https://www.ietf.org/rfc/rfc6497.txt
|
||||
scan.scan()
|
||||
if n := len(scan.token); n >= 2 && n <= 3 && isAlpha(scan.token[1]) {
|
||||
_, end = parseTag(scan)
|
||||
_, end = parseTag(scan, false)
|
||||
scan.toLower(start, end)
|
||||
}
|
||||
for len(scan.token) == 2 && !isAlpha(scan.token[1]) {
|
||||
|
|
vendor/golang.org/x/text/internal/language/tables.go | 868 (generated, vendored; diff too large to display)
vendor/golang.org/x/text/language/doc.go | 44 (generated, vendored)
|
@ -10,18 +10,17 @@
|
|||
// and provides the user with the best experience
|
||||
// (see https://blog.golang.org/matchlang).
|
||||
//
|
||||
//
|
||||
// Matching preferred against supported languages
|
||||
// # Matching preferred against supported languages
|
||||
//
|
||||
// A Matcher for an application that supports English, Australian English,
|
||||
// Danish, and standard Mandarin can be created as follows:
|
||||
//
|
||||
// var matcher = language.NewMatcher([]language.Tag{
|
||||
// language.English, // The first language is used as fallback.
|
||||
// language.MustParse("en-AU"),
|
||||
// language.Danish,
|
||||
// language.Chinese,
|
||||
// })
|
||||
// var matcher = language.NewMatcher([]language.Tag{
|
||||
// language.English, // The first language is used as fallback.
|
||||
// language.MustParse("en-AU"),
|
||||
// language.Danish,
|
||||
// language.Chinese,
|
||||
// })
|
||||
//
|
||||
// This list of supported languages is typically implied by the languages for
|
||||
// which there exists translations of the user interface.
|
||||
|
@ -30,14 +29,14 @@
|
|||
// language tags.
|
||||
// The MatchString finds best matches for such strings:
|
||||
//
|
||||
// handler(w http.ResponseWriter, r *http.Request) {
|
||||
// lang, _ := r.Cookie("lang")
|
||||
// accept := r.Header.Get("Accept-Language")
|
||||
// tag, _ := language.MatchStrings(matcher, lang.String(), accept)
|
||||
// handler(w http.ResponseWriter, r *http.Request) {
|
||||
// lang, _ := r.Cookie("lang")
|
||||
// accept := r.Header.Get("Accept-Language")
|
||||
// tag, _ := language.MatchStrings(matcher, lang.String(), accept)
|
||||
//
|
||||
// // tag should now be used for the initialization of any
|
||||
// // locale-specific service.
|
||||
// }
|
||||
// // tag should now be used for the initialization of any
|
||||
// // locale-specific service.
|
||||
// }
|
||||
//
|
||||
// The Matcher's Match method can be used to match Tags directly.
|
||||
//
|
||||
|
@ -48,8 +47,7 @@
|
|||
// For instance, it will know that a reader of Bokmål Danish can read Norwegian
|
||||
// and will know that Cantonese ("yue") is a good match for "zh-HK".
|
||||
//
|
||||
//
|
||||
// Using match results
|
||||
// # Using match results
|
||||
//
|
||||
// To guarantee a consistent user experience to the user it is important to
|
||||
// use the same language tag for the selection of any locale-specific services.
|
||||
|
@ -58,9 +56,9 @@
|
|||
// More subtly confusing is using the wrong sorting order or casing
|
||||
// algorithm for a certain language.
|
||||
//
|
||||
// All the packages in x/text that provide locale-specific services
|
||||
// (e.g. collate, cases) should be initialized with the tag that was
|
||||
// obtained at the start of an interaction with the user.
|
||||
// All the packages in x/text that provide locale-specific services
|
||||
// (e.g. collate, cases) should be initialized with the tag that was
|
||||
// obtained at the start of an interaction with the user.
|
||||
//
|
||||
// Note that Tag that is returned by Match and MatchString may differ from any
|
||||
// of the supported languages, as it may contain carried over settings from
|
||||
|
@ -70,8 +68,7 @@
|
|||
// Match and MatchString both return the index of the matched supported tag
|
||||
// to simplify associating such data with the matched tag.
|
||||
//
|
||||
//
|
||||
// Canonicalization
|
||||
// # Canonicalization
|
||||
//
|
||||
// If one uses the Matcher to compare languages one does not need to
|
||||
// worry about canonicalization.
|
||||
|
@ -92,10 +89,9 @@
|
|||
// equivalence relations. The CanonType type can be used to alter the
|
||||
// canonicalization form.
|
||||
//
|
||||
// References
|
||||
// # References
|
||||
//
|
||||
// BCP 47 - Tags for Identifying Languages http://tools.ietf.org/html/bcp47
|
||||
//
|
||||
package language // import "golang.org/x/text/language"
|
||||
|
||||
// TODO: explanation on how to match languages for your own locale-specific
|
||||
|
|
vendor/golang.org/x/text/language/match.go | 2 (generated, vendored)

@@ -545,7 +545,7 @@ type bestMatch struct {
 // match as the preferred match.
 //
 // If pin is true and have and tag are a strong match, it will henceforth only
-// consider matches for this language. This corresponds to the nothing that most
+// consider matches for this language. This corresponds to the idea that most
 // users have a strong preference for the first defined language. A user can
 // still prefer a second language over a dialect of the preferred language by
 // explicitly specifying dialects, e.g. "en, nl, en-GB". In this case pin should
vendor/golang.org/x/text/language/parse.go | 5 (generated, vendored)

@@ -147,6 +147,7 @@ func update(b *language.Builder, part ...interface{}) (err error) {
 }
 
 var errInvalidWeight = errors.New("ParseAcceptLanguage: invalid weight")
+var errTagListTooLarge = errors.New("tag list exceeds max length")
 
 // ParseAcceptLanguage parses the contents of an Accept-Language header as
 // defined in http://www.ietf.org/rfc/rfc2616.txt and returns a list of Tags and
@@ -164,6 +165,10 @@ func ParseAcceptLanguage(s string) (tag []Tag, q []float32, err error) {
 		}
 	}()
 
+	if strings.Count(s, "-") > 1000 {
+		return nil, nil, errTagListTooLarge
+	}
+
 	var entry string
 	for s != "" {
 		if entry, s = split(s, ','); entry == "" {
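This hunk is the core of the CVE-2022-32149 fix: ParseAcceptLanguage now rejects a header containing more than 1000 hyphens up front (errTagListTooLarge) instead of spending unbounded time parsing it. A sketch of the kind of call site that benefits; the handler and supported-language list are illustrative only, not code from this repository:

package main

import (
	"fmt"
	"net/http"

	"golang.org/x/text/language"
)

// Supported languages of the hypothetical service; the first entry is the fallback.
var matcher = language.NewMatcher([]language.Tag{
	language.English,
	language.Danish,
})

func handler(w http.ResponseWriter, r *http.Request) {
	// With x/text >= v0.3.8 a maliciously long Accept-Language header fails
	// fast here instead of stalling the handler.
	tags, _, err := language.ParseAcceptLanguage(r.Header.Get("Accept-Language"))
	if err != nil {
		tags = []language.Tag{language.English}
	}
	tag, _, _ := matcher.Match(tags...)
	fmt.Fprintf(w, "serving locale %s\n", tag)
}

func main() {
	http.HandleFunc("/", handler)
	_ = http.ListenAndServe(":8080", nil)
}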
vendor/golang.org/x/text/language/tables.go | 18 (generated, vendored)
|
@ -39,12 +39,12 @@ const (
|
|||
_Hani = 57
|
||||
_Hans = 59
|
||||
_Hant = 60
|
||||
_Qaaa = 143
|
||||
_Qaai = 151
|
||||
_Qabx = 192
|
||||
_Zinh = 245
|
||||
_Zyyy = 250
|
||||
_Zzzz = 251
|
||||
_Qaaa = 147
|
||||
_Qaai = 155
|
||||
_Qabx = 196
|
||||
_Zinh = 252
|
||||
_Zyyy = 257
|
||||
_Zzzz = 258
|
||||
)
|
||||
|
||||
var regionToGroups = []uint8{ // 358 elements
|
||||
|
@ -265,9 +265,9 @@ var matchScript = []scriptIntelligibility{ // 26 elements
|
|||
13: {wantLang: 0x39d, haveLang: 0x139, wantScript: 0x36, haveScript: 0x5a, distance: 0xa},
|
||||
14: {wantLang: 0x3be, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
||||
15: {wantLang: 0x3fa, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
||||
16: {wantLang: 0x40c, haveLang: 0x139, wantScript: 0xcf, haveScript: 0x5a, distance: 0xa},
|
||||
17: {wantLang: 0x450, haveLang: 0x139, wantScript: 0xde, haveScript: 0x5a, distance: 0xa},
|
||||
18: {wantLang: 0x461, haveLang: 0x139, wantScript: 0xe1, haveScript: 0x5a, distance: 0xa},
|
||||
16: {wantLang: 0x40c, haveLang: 0x139, wantScript: 0xd4, haveScript: 0x5a, distance: 0xa},
|
||||
17: {wantLang: 0x450, haveLang: 0x139, wantScript: 0xe3, haveScript: 0x5a, distance: 0xa},
|
||||
18: {wantLang: 0x461, haveLang: 0x139, wantScript: 0xe6, haveScript: 0x5a, distance: 0xa},
|
||||
19: {wantLang: 0x46f, haveLang: 0x139, wantScript: 0x2c, haveScript: 0x5a, distance: 0xa},
|
||||
20: {wantLang: 0x476, haveLang: 0x3e2, wantScript: 0x5a, haveScript: 0x20, distance: 0xa},
|
||||
21: {wantLang: 0x4b4, haveLang: 0x139, wantScript: 0x5, haveScript: 0x5a, distance: 0xa},
|
||||
|
|
vendor/golang.org/x/text/unicode/bidi/core.go | 26 (generated, vendored)
|
@ -193,14 +193,14 @@ func (p *paragraph) run() {
|
|||
//
|
||||
// At the end of this function:
|
||||
//
|
||||
// - The member variable matchingPDI is set to point to the index of the
|
||||
// matching PDI character for each isolate initiator character. If there is
|
||||
// no matching PDI, it is set to the length of the input text. For other
|
||||
// characters, it is set to -1.
|
||||
// - The member variable matchingIsolateInitiator is set to point to the
|
||||
// index of the matching isolate initiator character for each PDI character.
|
||||
// If there is no matching isolate initiator, or the character is not a PDI,
|
||||
// it is set to -1.
|
||||
// - The member variable matchingPDI is set to point to the index of the
|
||||
// matching PDI character for each isolate initiator character. If there is
|
||||
// no matching PDI, it is set to the length of the input text. For other
|
||||
// characters, it is set to -1.
|
||||
// - The member variable matchingIsolateInitiator is set to point to the
|
||||
// index of the matching isolate initiator character for each PDI character.
|
||||
// If there is no matching isolate initiator, or the character is not a PDI,
|
||||
// it is set to -1.
|
||||
func (p *paragraph) determineMatchingIsolates() {
|
||||
p.matchingPDI = make([]int, p.Len())
|
||||
p.matchingIsolateInitiator = make([]int, p.Len())
|
||||
|
@ -435,7 +435,7 @@ func maxLevel(a, b level) level {
|
|||
}
|
||||
|
||||
// Rule X10, second bullet: Determine the start-of-sequence (sos) and end-of-sequence (eos) types,
|
||||
// either L or R, for each isolating run sequence.
|
||||
// either L or R, for each isolating run sequence.
|
||||
func (p *paragraph) isolatingRunSequence(indexes []int) *isolatingRunSequence {
|
||||
length := len(indexes)
|
||||
types := make([]Class, length)
|
||||
|
@ -495,9 +495,9 @@ func (s *isolatingRunSequence) resolveWeakTypes() {
|
|||
if t == NSM {
|
||||
s.types[i] = precedingCharacterType
|
||||
} else {
|
||||
if t.in(LRI, RLI, FSI, PDI) {
|
||||
precedingCharacterType = ON
|
||||
}
|
||||
// if t.in(LRI, RLI, FSI, PDI) {
|
||||
// precedingCharacterType = ON
|
||||
// }
|
||||
precedingCharacterType = t
|
||||
}
|
||||
}
|
||||
|
@ -905,7 +905,7 @@ func (p *paragraph) getLevels(linebreaks []int) []level {
|
|||
// Lines are concatenated from left to right. So for example, the fifth
|
||||
// character from the left on the third line is
|
||||
//
|
||||
// getReordering(linebreaks)[linebreaks[1] + 4]
|
||||
// getReordering(linebreaks)[linebreaks[1] + 4]
|
||||
//
|
||||
// (linebreaks[1] is the position after the last character of the second
|
||||
// line, which is also the index of the first character on the third line,
|
||||
|
|
vendor/golang.org/x/text/unicode/norm/forminfo.go | 9 (generated, vendored)
|
@ -110,10 +110,11 @@ func (p Properties) BoundaryAfter() bool {
|
|||
}
|
||||
|
||||
// We pack quick check data in 4 bits:
|
||||
// 5: Combines forward (0 == false, 1 == true)
|
||||
// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
|
||||
// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
|
||||
// 1..0: Number of trailing non-starters.
|
||||
//
|
||||
// 5: Combines forward (0 == false, 1 == true)
|
||||
// 4..3: NFC_QC Yes(00), No (10), or Maybe (11)
|
||||
// 2: NFD_QC Yes (0) or No (1). No also means there is a decomposition.
|
||||
// 1..0: Number of trailing non-starters.
|
||||
//
|
||||
// When all 4 bits are zero, the character is inert, meaning it is never
|
||||
// influenced by normalization.
|
||||
|
|
vendor/golang.org/x/text/unicode/norm/normalize.go | 11 (generated, vendored)
|
@ -18,16 +18,17 @@ import (
|
|||
// A Form denotes a canonical representation of Unicode code points.
|
||||
// The Unicode-defined normalization and equivalence forms are:
|
||||
//
|
||||
// NFC Unicode Normalization Form C
|
||||
// NFD Unicode Normalization Form D
|
||||
// NFKC Unicode Normalization Form KC
|
||||
// NFKD Unicode Normalization Form KD
|
||||
// NFC Unicode Normalization Form C
|
||||
// NFD Unicode Normalization Form D
|
||||
// NFKC Unicode Normalization Form KC
|
||||
// NFKD Unicode Normalization Form KD
|
||||
//
|
||||
// For a Form f, this documentation uses the notation f(x) to mean
|
||||
// the bytes or string x converted to the given form.
|
||||
// A position n in x is called a boundary if conversion to the form can
|
||||
// proceed independently on both sides:
|
||||
// f(x) == append(f(x[0:n]), f(x[n:])...)
|
||||
//
|
||||
// f(x) == append(f(x[0:n]), f(x[n:])...)
|
||||
//
|
||||
// References: https://unicode.org/reports/tr15/ and
|
||||
// https://unicode.org/notes/tn5/.
|
||||
|
|
vendor/golang.org/x/text/unicode/norm/tables13.0.0.go | 4 (generated, vendored)
|
@ -7315,7 +7315,7 @@ const recompMapPacked = "" +
|
|||
"\x00V\x03\x03\x00\x00\x1e|" + // 0x00560303: 0x00001E7C
|
||||
"\x00v\x03\x03\x00\x00\x1e}" + // 0x00760303: 0x00001E7D
|
||||
"\x00V\x03#\x00\x00\x1e~" + // 0x00560323: 0x00001E7E
|
||||
"\x00v\x03#\x00\x00\x1e\u007f" + // 0x00760323: 0x00001E7F
|
||||
"\x00v\x03#\x00\x00\x1e\x7f" + // 0x00760323: 0x00001E7F
|
||||
"\x00W\x03\x00\x00\x00\x1e\x80" + // 0x00570300: 0x00001E80
|
||||
"\x00w\x03\x00\x00\x00\x1e\x81" + // 0x00770300: 0x00001E81
|
||||
"\x00W\x03\x01\x00\x00\x1e\x82" + // 0x00570301: 0x00001E82
|
||||
|
@ -7342,7 +7342,7 @@ const recompMapPacked = "" +
|
|||
"\x00t\x03\b\x00\x00\x1e\x97" + // 0x00740308: 0x00001E97
|
||||
"\x00w\x03\n\x00\x00\x1e\x98" + // 0x0077030A: 0x00001E98
|
||||
"\x00y\x03\n\x00\x00\x1e\x99" + // 0x0079030A: 0x00001E99
|
||||
"\x01\u007f\x03\a\x00\x00\x1e\x9b" + // 0x017F0307: 0x00001E9B
|
||||
"\x01\x7f\x03\a\x00\x00\x1e\x9b" + // 0x017F0307: 0x00001E9B
|
||||
"\x00A\x03#\x00\x00\x1e\xa0" + // 0x00410323: 0x00001EA0
|
||||
"\x00a\x03#\x00\x00\x1e\xa1" + // 0x00610323: 0x00001EA1
|
||||
"\x00A\x03\t\x00\x00\x1e\xa2" + // 0x00410309: 0x00001EA2
|
||||
|
|
vendor/golang.org/x/text/width/tables10.0.0.go | 24 (generated, vendored)
|
@ -1146,21 +1146,31 @@ var widthIndex = [1408]uint8{
|
|||
}
|
||||
|
||||
// inverseData contains 4-byte entries of the following format:
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the
|
||||
// UTF-8 encoding of the original rune. Mappings often have the following
|
||||
// pattern:
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// By xor-ing the last byte the same entry can be shared by many mappings. This
|
||||
// reduces the total number of distinct entries by about two thirds.
|
||||
// The resulting entry for the aforementioned mappings is
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// Note that because of the xor-ing, the byte sequence stored in the entry is
|
||||
// not valid UTF-8.
|
||||
var inverseData = [150][4]byte{
|
||||
|
|
vendor/golang.org/x/text/width/tables11.0.0.go | 24 (generated, vendored)
|
@ -1158,21 +1158,31 @@ var widthIndex = [1408]uint8{
|
|||
}
|
||||
|
||||
// inverseData contains 4-byte entries of the following format:
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the
|
||||
// UTF-8 encoding of the original rune. Mappings often have the following
|
||||
// pattern:
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// By xor-ing the last byte the same entry can be shared by many mappings. This
|
||||
// reduces the total number of distinct entries by about two thirds.
|
||||
// The resulting entry for the aforementioned mappings is
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// Note that because of the xor-ing, the byte sequence stored in the entry is
|
||||
// not valid UTF-8.
|
||||
var inverseData = [150][4]byte{
|
||||
|
|
vendor/golang.org/x/text/width/tables12.0.0.go | 24 (generated, vendored)
|
@ -1178,21 +1178,31 @@ var widthIndex = [1408]uint8{
|
|||
}
|
||||
|
||||
// inverseData contains 4-byte entries of the following format:
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the
|
||||
// UTF-8 encoding of the original rune. Mappings often have the following
|
||||
// pattern:
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// By xor-ing the last byte the same entry can be shared by many mappings. This
|
||||
// reduces the total number of distinct entries by about two thirds.
|
||||
// The resulting entry for the aforementioned mappings is
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// Note that because of the xor-ing, the byte sequence stored in the entry is
|
||||
// not valid UTF-8.
|
||||
var inverseData = [150][4]byte{
|
||||
|
|
vendor/golang.org/x/text/width/tables13.0.0.go | 24 (generated, vendored)
|
@ -1179,21 +1179,31 @@ var widthIndex = [1408]uint8{
|
|||
}
|
||||
|
||||
// inverseData contains 4-byte entries of the following format:
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the
|
||||
// UTF-8 encoding of the original rune. Mappings often have the following
|
||||
// pattern:
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// By xor-ing the last byte the same entry can be shared by many mappings. This
|
||||
// reduces the total number of distinct entries by about two thirds.
|
||||
// The resulting entry for the aforementioned mappings is
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// Note that because of the xor-ing, the byte sequence stored in the entry is
|
||||
// not valid UTF-8.
|
||||
var inverseData = [150][4]byte{
|
||||
|
|
vendor/golang.org/x/text/width/tables9.0.0.go | 24 (generated, vendored)
|
@ -1114,21 +1114,31 @@ var widthIndex = [1408]uint8{
|
|||
}
|
||||
|
||||
// inverseData contains 4-byte entries of the following format:
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// <length> <modified UTF-8-encoded rune> <0 padding>
|
||||
//
|
||||
// The last byte of the UTF-8-encoded rune is xor-ed with the last byte of the
|
||||
// UTF-8 encoding of the original rune. Mappings often have the following
|
||||
// pattern:
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// A -> A (U+FF21 -> U+0041)
|
||||
// B -> B (U+FF22 -> U+0042)
|
||||
// ...
|
||||
//
|
||||
// By xor-ing the last byte the same entry can be shared by many mappings. This
|
||||
// reduces the total number of distinct entries by about two thirds.
|
||||
// The resulting entry for the aforementioned mappings is
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// { 0x01, 0xE0, 0x00, 0x00 }
|
||||
//
|
||||
// Using this entry to map U+FF21 (UTF-8 [EF BC A1]), we get
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// E0 ^ A1 = 41.
|
||||
//
|
||||
// Similarly, for U+FF22 (UTF-8 [EF BC A2]), we get
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// E0 ^ A2 = 42.
|
||||
//
|
||||
// Note that because of the xor-ing, the byte sequence stored in the entry is
|
||||
// not valid UTF-8.
|
||||
var inverseData = [150][4]byte{
|
||||
|
|
vendor/golang.org/x/tools/AUTHORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code refers to The Go Authors for copyright purposes.
|
||||
# The master list of authors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/AUTHORS.
|
vendor/golang.org/x/tools/CONTRIBUTORS | 3 (generated, vendored)
|
@ -1,3 +0,0 @@
|
|||
# This source code was written by the Go contributors.
|
||||
# The master list of contributors is in the main Go distribution,
|
||||
# visible at http://tip.golang.org/CONTRIBUTORS.
|
vendor/golang.org/x/tools/go/analysis/doc.go | 23 (generated, vendored)
|
@ -3,12 +3,10 @@
|
|||
// license that can be found in the LICENSE file.
|
||||
|
||||
/*
|
||||
|
||||
Package analysis defines the interface between a modular static
|
||||
analysis and an analysis driver program.
|
||||
|
||||
|
||||
Background
|
||||
# Background
|
||||
|
||||
A static analysis is a function that inspects a package of Go code and
|
||||
reports a set of diagnostics (typically mistakes in the code), and
|
||||
|
@ -32,8 +30,7 @@ frameworks, code review tools, code-base indexers (such as SourceGraph),
|
|||
documentation viewers (such as godoc), batch pipelines for large code
|
||||
bases, and so on.
|
||||
|
||||
|
||||
Analyzer
|
||||
# Analyzer
|
||||
|
||||
The primary type in the API is Analyzer. An Analyzer statically
|
||||
describes an analysis function: its name, documentation, flags,
|
||||
|
@ -115,8 +112,7 @@ Finally, the Run field contains a function to be called by the driver to
|
|||
execute the analysis on a single package. The driver passes it an
|
||||
instance of the Pass type.
|
||||
|
||||
|
||||
Pass
|
||||
# Pass
|
||||
|
||||
A Pass describes a single unit of work: the application of a particular
|
||||
Analyzer to a particular package of Go code.
|
||||
|
@ -202,8 +198,7 @@ raw text file, use the following sequence:
|
|||
...
|
||||
pass.Reportf(tf.LineStart(line), "oops")
|
||||
|
||||
|
||||
Modular analysis with Facts
|
||||
# Modular analysis with Facts
|
||||
|
||||
To improve efficiency and scalability, large programs are routinely
|
||||
built using separate compilation: units of the program are compiled
|
||||
|
@ -246,6 +241,9 @@ Consequently, Facts must be serializable. The API requires that drivers
|
|||
use the gob encoding, an efficient, robust, self-describing binary
|
||||
protocol. A fact type may implement the GobEncoder/GobDecoder interfaces
|
||||
if the default encoding is unsuitable. Facts should be stateless.
|
||||
Because serialized facts may appear within build outputs, the gob encoding
|
||||
of a fact must be deterministic, to avoid spurious cache misses in
|
||||
build systems that use content-addressable caches.
|
||||
|
||||
The Pass type has functions to import and export facts,
|
||||
associated either with an object or with a package:
|
||||
|
@ -280,8 +278,7 @@ this fact is built in to the analyzer so that it correctly checks
|
|||
calls to log.Printf even when run in a driver that does not apply
|
||||
it to standard packages. We would like to remove this limitation in future.
|
||||
|
||||
|
||||
Testing an Analyzer
|
||||
# Testing an Analyzer
|
||||
|
||||
The analysistest subpackage provides utilities for testing an Analyzer.
|
||||
In a few lines of code, it is possible to run an analyzer on a package
|
||||
|
@ -289,8 +286,7 @@ of testdata files and check that it reported all the expected
|
|||
diagnostics and facts (and no more). Expectations are expressed using
|
||||
"// want ..." comments in the input code.
|
||||
|
||||
|
||||
Standalone commands
|
||||
# Standalone commands
|
||||
|
||||
Analyzers are provided in the form of packages that a driver program is
|
||||
expected to import. The vet command imports a set of several analyzers,
|
||||
|
@ -316,6 +312,5 @@ entirety as:
|
|||
|
||||
A tool that provides multiple analyzers can use multichecker in a
|
||||
similar way, giving it the list of Analyzers.
|
||||
|
||||
*/
|
||||
package analysis
|
||||
|
|
12
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/arches_go118.go
generated
vendored
Normal file
12
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/arches_go118.go
generated
vendored
Normal file
|
@ -0,0 +1,12 @@
|
|||
// Copyright 2022 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build !go1.19
|
||||
// +build !go1.19
|
||||
|
||||
package asmdecl
|
||||
|
||||
func additionalArches() []*asmArch {
|
||||
return nil
|
||||
}
|
14
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/arches_go119.go
generated
vendored
Normal file
14
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/arches_go119.go
generated
vendored
Normal file
|
@ -0,0 +1,14 @@
|
|||
// Copyright 2022 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
//go:build go1.19
|
||||
// +build go1.19
|
||||
|
||||
package asmdecl
|
||||
|
||||
var asmArchLoong64 = asmArch{name: "loong64", bigEndian: false, stack: "R3", lr: true}
|
||||
|
||||
func additionalArches() []*asmArch {
|
||||
return []*asmArch{&asmArchLoong64}
|
||||
}
|
7
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
generated
vendored
7
vendor/golang.org/x/tools/go/analysis/passes/asmdecl/asmdecl.go
generated
vendored
|
@ -90,8 +90,8 @@ var (
|
|||
asmArchMipsLE = asmArch{name: "mipsle", bigEndian: false, stack: "R29", lr: true}
|
||||
asmArchMips64 = asmArch{name: "mips64", bigEndian: true, stack: "R29", lr: true}
|
||||
asmArchMips64LE = asmArch{name: "mips64le", bigEndian: false, stack: "R29", lr: true}
|
||||
asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true}
|
||||
asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true}
|
||||
asmArchPpc64 = asmArch{name: "ppc64", bigEndian: true, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
|
||||
asmArchPpc64LE = asmArch{name: "ppc64le", bigEndian: false, stack: "R1", lr: true, retRegs: []string{"R3", "F1"}}
|
||||
asmArchRISCV64 = asmArch{name: "riscv64", bigEndian: false, stack: "SP", lr: true}
|
||||
asmArchS390X = asmArch{name: "s390x", bigEndian: true, stack: "R15", lr: true}
|
||||
asmArchWasm = asmArch{name: "wasm", bigEndian: false, stack: "SP", lr: false}
|
||||
|
@ -114,6 +114,7 @@ var (
|
|||
)
|
||||
|
||||
func init() {
|
||||
arches = append(arches, additionalArches()...)
|
||||
for _, arch := range arches {
|
||||
arch.sizes = types.SizesFor("gc", arch.name)
|
||||
if arch.sizes == nil {
|
||||
|
@ -731,7 +732,7 @@ func asmCheckVar(badf func(string, ...interface{}), fn *asmFunc, line, expr stri
|
|||
src = 8
|
||||
}
|
||||
}
|
||||
case "mips", "mipsle", "mips64", "mips64le":
|
||||
case "loong64", "mips", "mipsle", "mips64", "mips64le":
|
||||
switch op {
|
||||
case "MOVB", "MOVBU":
|
||||
src = 1
|
||||
|
|
12
vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go
generated
vendored
12
vendor/golang.org/x/tools/go/analysis/passes/bools/bools.go
generated
vendored
|
@ -94,8 +94,10 @@ func (op boolOp) commutativeSets(info *types.Info, e *ast.BinaryExpr, seen map[*
|
|||
}
|
||||
|
||||
// checkRedundant checks for expressions of the form
|
||||
// e && e
|
||||
// e || e
|
||||
//
|
||||
// e && e
|
||||
// e || e
|
||||
//
|
||||
// Exprs must contain only side effect free expressions.
|
||||
func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
|
||||
seen := make(map[string]bool)
|
||||
|
@ -110,8 +112,10 @@ func (op boolOp) checkRedundant(pass *analysis.Pass, exprs []ast.Expr) {
|
|||
}
|
||||
|
||||
// checkSuspect checks for expressions of the form
|
||||
// x != c1 || x != c2
|
||||
// x == c1 && x == c2
|
||||
//
|
||||
// x != c1 || x != c2
|
||||
// x == c1 && x == c2
|
||||
//
|
||||
// where c1 and c2 are constant expressions.
|
||||
// If c1 and c2 are the same then it's redundant;
|
||||
// if c1 and c2 are different then it's always true or always false.
|
||||
|
|
3
vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go
generated
vendored
3
vendor/golang.org/x/tools/go/analysis/passes/buildssa/buildssa.go
generated
vendored
|
@ -48,7 +48,8 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
|
||||
// Some Analyzers may need GlobalDebug, in which case we'll have
|
||||
// to set it globally, but let's wait till we need it.
|
||||
mode := ssa.BuilderMode(0)
|
||||
// Monomorphize at least until type parameters are available.
|
||||
mode := ssa.InstantiateGenerics
|
||||
|
||||
prog := ssa.NewProgram(pass.Fset, mode)
|
||||
|
||||
|
|
16
vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
generated
vendored
16
vendor/golang.org/x/tools/go/analysis/passes/cgocall/cgocall.go
generated
vendored
|
@ -122,8 +122,8 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
|
|||
// For example, for each raw cgo source file in the original package,
|
||||
// such as this one:
|
||||
//
|
||||
// package p
|
||||
// import "C"
|
||||
// package p
|
||||
// import "C"
|
||||
// import "fmt"
|
||||
// type T int
|
||||
// const k = 3
|
||||
|
@ -147,9 +147,9 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
|
|||
// the receiver into the first parameter;
|
||||
// and all functions are renamed to "_".
|
||||
//
|
||||
// package p
|
||||
// import . "·this·" // declares T, k, x, y, f, g, T.f
|
||||
// import "C"
|
||||
// package p
|
||||
// import . "·this·" // declares T, k, x, y, f, g, T.f
|
||||
// import "C"
|
||||
// import "fmt"
|
||||
// const _ = 3
|
||||
// var _, _ = fmt.Println()
|
||||
|
@ -169,7 +169,6 @@ func checkCgo(fset *token.FileSet, f *ast.File, info *types.Info, reportf func(t
|
|||
// C.f would resolve to "·this·"._C_func_f, for example. But we have
|
||||
// limited ourselves here to preserving function bodies and initializer
|
||||
// expressions since that is all that the cgocall analyzer needs.
|
||||
//
|
||||
func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*ast.File, info *types.Info, sizes types.Sizes) ([]*ast.File, *types.Info, error) {
|
||||
const thispkg = "·this·"
|
||||
|
||||
|
@ -284,8 +283,9 @@ func typeCheckCgoSourceFiles(fset *token.FileSet, pkg *types.Package, files []*a
|
|||
|
||||
// cgoBaseType tries to look through type conversions involving
|
||||
// unsafe.Pointer to find the real type. It converts:
|
||||
// unsafe.Pointer(x) => x
|
||||
// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
|
||||
//
|
||||
// unsafe.Pointer(x) => x
|
||||
// *(*unsafe.Pointer)(unsafe.Pointer(&x)) => x
|
||||
func cgoBaseType(info *types.Info, arg ast.Expr) types.Type {
|
||||
switch arg := arg.(type) {
|
||||
case *ast.CallExpr:
|
||||
|
|
41
vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
generated
vendored
41
vendor/golang.org/x/tools/go/analysis/passes/composite/composite.go
generated
vendored
|
@ -7,6 +7,7 @@
|
|||
package composite
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
"strings"
|
||||
|
@ -83,7 +84,8 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
}
|
||||
for _, typ := range structuralTypes {
|
||||
under := deref(typ.Underlying())
|
||||
if _, ok := under.(*types.Struct); !ok {
|
||||
strct, ok := under.(*types.Struct)
|
||||
if !ok {
|
||||
// skip non-struct composite literals
|
||||
continue
|
||||
}
|
||||
|
@ -92,20 +94,47 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
continue
|
||||
}
|
||||
|
||||
// check if the CompositeLit contains an unkeyed field
|
||||
// check if the struct contains an unkeyed field
|
||||
allKeyValue := true
|
||||
for _, e := range cl.Elts {
|
||||
var suggestedFixAvailable = len(cl.Elts) == strct.NumFields()
|
||||
var missingKeys []analysis.TextEdit
|
||||
for i, e := range cl.Elts {
|
||||
if _, ok := e.(*ast.KeyValueExpr); !ok {
|
||||
allKeyValue = false
|
||||
break
|
||||
if i >= strct.NumFields() {
|
||||
break
|
||||
}
|
||||
field := strct.Field(i)
|
||||
if !field.Exported() {
|
||||
// Adding unexported field names for structs not defined
|
||||
// locally will not work.
|
||||
suggestedFixAvailable = false
|
||||
break
|
||||
}
|
||||
missingKeys = append(missingKeys, analysis.TextEdit{
|
||||
Pos: e.Pos(),
|
||||
End: e.Pos(),
|
||||
NewText: []byte(fmt.Sprintf("%s: ", field.Name())),
|
||||
})
|
||||
}
|
||||
}
|
||||
if allKeyValue {
|
||||
// all the composite literal fields are keyed
|
||||
// all the struct fields are keyed
|
||||
continue
|
||||
}
|
||||
|
||||
pass.ReportRangef(cl, "%s composite literal uses unkeyed fields", typeName)
|
||||
diag := analysis.Diagnostic{
|
||||
Pos: cl.Pos(),
|
||||
End: cl.End(),
|
||||
Message: fmt.Sprintf("%s struct literal uses unkeyed fields", typeName),
|
||||
}
|
||||
if suggestedFixAvailable {
|
||||
diag.SuggestedFixes = []analysis.SuggestedFix{{
|
||||
Message: "Add field names to struct literal",
|
||||
TextEdits: missingKeys,
|
||||
}}
|
||||
}
|
||||
pass.Report(diag)
|
||||
return
|
||||
}
|
||||
})
|
||||
|
|
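For context, a minimal sketch of the literal shape the updated composite check reports, and the keyed form its new suggested fix would produce; net.TCPAddr is only an example of an imported struct type, not something named by the analyzer:

package main

import "net"

func main() {
	// Unkeyed: would typically be reported as a struct literal that
	// uses unkeyed fields, with a suggested fix adding the names.
	_ = net.TCPAddr{net.IPv4(127, 0, 0, 1), 8080, ""}

	// Keyed: the form the inserted TextEdits would produce.
	_ = net.TCPAddr{IP: net.IPv4(127, 0, 0, 1), Port: 8080, Zone: ""}
}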
9
vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go
generated
vendored
9
vendor/golang.org/x/tools/go/analysis/passes/composite/whitelist.go
generated
vendored
|
@ -26,9 +26,10 @@ var unkeyedLiteral = map[string]bool{
|
|||
"unicode.Range16": true,
|
||||
"unicode.Range32": true,
|
||||
|
||||
// These three structs are used in generated test main files,
|
||||
// These four structs are used in generated test main files,
|
||||
// but the generator can be trusted.
|
||||
"testing.InternalBenchmark": true,
|
||||
"testing.InternalExample": true,
|
||||
"testing.InternalTest": true,
|
||||
"testing.InternalBenchmark": true,
|
||||
"testing.InternalExample": true,
|
||||
"testing.InternalTest": true,
|
||||
"testing.InternalFuzzTarget": true,
|
||||
}
|
||||
|
|
2
vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go
generated
vendored
2
vendor/golang.org/x/tools/go/analysis/passes/copylock/copylock.go
generated
vendored
|
@ -128,7 +128,7 @@ func checkCopyLocksCallExpr(pass *analysis.Pass, ce *ast.CallExpr) {
|
|||
}
|
||||
if fun, ok := pass.TypesInfo.Uses[id].(*types.Builtin); ok {
|
||||
switch fun.Name() {
|
||||
case "new", "len", "cap", "Sizeof":
|
||||
case "new", "len", "cap", "Sizeof", "Offsetof", "Alignof":
|
||||
return
|
||||
}
|
||||
}
|
||||
|
|
28
vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go
generated
vendored
28
vendor/golang.org/x/tools/go/analysis/passes/errorsas/errorsas.go
generated
vendored
|
@ -7,6 +7,7 @@
|
|||
package errorsas
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"go/ast"
|
||||
"go/types"
|
||||
|
||||
|
@ -50,26 +51,39 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
if len(call.Args) < 2 {
|
||||
return // not enough arguments, e.g. called with return values of another function
|
||||
}
|
||||
if fn.FullName() == "errors.As" && !pointerToInterfaceOrError(pass, call.Args[1]) {
|
||||
pass.ReportRangef(call, "second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
|
||||
if fn.FullName() != "errors.As" {
|
||||
return
|
||||
}
|
||||
if err := checkAsTarget(pass, call.Args[1]); err != nil {
|
||||
pass.ReportRangef(call, "%v", err)
|
||||
}
|
||||
})
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
var errorType = types.Universe.Lookup("error").Type().Underlying().(*types.Interface)
|
||||
var errorType = types.Universe.Lookup("error").Type()
|
||||
|
||||
// pointerToInterfaceOrError reports whether the type of e is a pointer to an interface or a type implementing error,
|
||||
// or is the empty interface.
|
||||
func pointerToInterfaceOrError(pass *analysis.Pass, e ast.Expr) bool {
|
||||
|
||||
// checkAsTarget reports an error if the second argument to errors.As is invalid.
|
||||
func checkAsTarget(pass *analysis.Pass, e ast.Expr) error {
|
||||
t := pass.TypesInfo.Types[e].Type
|
||||
if it, ok := t.Underlying().(*types.Interface); ok && it.NumMethods() == 0 {
|
||||
return true
|
||||
// A target of interface{} is always allowed, since it often indicates
|
||||
// a value forwarded from another source.
|
||||
return nil
|
||||
}
|
||||
pt, ok := t.Underlying().(*types.Pointer)
|
||||
if !ok {
|
||||
return false
|
||||
return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
|
||||
}
|
||||
if pt.Elem() == errorType {
|
||||
return errors.New("second argument to errors.As should not be *error")
|
||||
}
|
||||
_, ok = pt.Elem().Underlying().(*types.Interface)
|
||||
return ok || types.Implements(pt.Elem(), errorType)
|
||||
if ok || types.Implements(pt.Elem(), errorType.Underlying().(*types.Interface)) {
|
||||
return nil
|
||||
}
|
||||
return errors.New("second argument to errors.As must be a non-nil pointer to either a type that implements error, or to any interface type")
|
||||
}
|
||||
|
|
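A minimal sketch of the call shapes the rewritten checkAsTarget distinguishes; the wrapped error here is only illustrative:

package main

import (
	"errors"
	"fmt"
	"io/fs"
)

func main() {
	err := fmt.Errorf("wrap: %w", fs.ErrNotExist)

	var perr *fs.PathError
	_ = errors.As(err, &perr) // fine: pointer to a type that implements error

	var target error
	_ = errors.As(err, &target) // reported: second argument should not be *error
}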
7
vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go
generated
vendored
7
vendor/golang.org/x/tools/go/analysis/passes/fieldalignment/fieldalignment.go
generated
vendored
|
@ -23,7 +23,7 @@ import (
|
|||
const Doc = `find structs that would use less memory if their fields were sorted
|
||||
|
||||
This analyzer find structs that can be rearranged to use less memory, and provides
|
||||
a suggested edit with the optimal order.
|
||||
a suggested edit with the most compact order.
|
||||
|
||||
Note that there are two different diagnostics reported. One checks struct size,
|
||||
and the other reports "pointer bytes" used. Pointer bytes is how many bytes of the
|
||||
|
@ -41,6 +41,11 @@ has 24 pointer bytes because it has to scan further through the *uint32.
|
|||
struct { string; uint32 }
|
||||
|
||||
has 8 because it can stop immediately after the string pointer.
|
||||
|
||||
Be aware that the most compact order is not always the most efficient.
|
||||
In rare cases it may cause two variables each updated by its own goroutine
|
||||
to occupy the same CPU cache line, inducing a form of memory contention
|
||||
known as "false sharing" that slows down both goroutines.
|
||||
`
|
||||
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
|
|
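A minimal sketch of the size difference this analyzer reports, and why the doc above now says "most compact" rather than "optimal"; sizes assume a typical 64-bit platform:

package main

import (
	"fmt"
	"unsafe"
)

// loose wastes padding around the 8-byte-aligned string field.
type loose struct {
	a bool
	s string
	b bool
}

// compact is the reordered form the analyzer suggests: 24 bytes instead of 32.
// Note the doc's caveat: packing two hot fields together can cause false sharing.
type compact struct {
	s string
	a bool
	b bool
}

func main() {
	fmt.Println(unsafe.Sizeof(loose{}), unsafe.Sizeof(compact{})) // 32 24
}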
27
vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
generated
vendored
27
vendor/golang.org/x/tools/go/analysis/passes/httpresponse/httpresponse.go
generated
vendored
|
@ -62,15 +62,23 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
|
||||
// Find the innermost containing block, and get the list
|
||||
// of statements starting with the one containing call.
|
||||
stmts := restOfBlock(stack)
|
||||
stmts, ncalls := restOfBlock(stack)
|
||||
if len(stmts) < 2 {
|
||||
return true // the call to the http function is the last statement of the block.
|
||||
// The call to the http function is the last statement of the block.
|
||||
return true
|
||||
}
|
||||
|
||||
// Skip cases in which the call is wrapped by another (#52661).
|
||||
// Example: resp, err := checkError(http.Get(url))
|
||||
if ncalls > 1 {
|
||||
return true
|
||||
}
|
||||
|
||||
asg, ok := stmts[0].(*ast.AssignStmt)
|
||||
if !ok {
|
||||
return true // the first statement is not assignment.
|
||||
}
|
||||
|
||||
resp := rootIdent(asg.Lhs[0])
|
||||
if resp == nil {
|
||||
return true // could not find the http.Response in the assignment.
|
||||
|
@ -130,20 +138,25 @@ func isHTTPFuncOrMethodOnClient(info *types.Info, expr *ast.CallExpr) bool {
|
|||
}
|
||||
|
||||
// restOfBlock, given a traversal stack, finds the innermost containing
|
||||
// block and returns the suffix of its statements starting with the
|
||||
// current node (the last element of stack).
|
||||
func restOfBlock(stack []ast.Node) []ast.Stmt {
|
||||
// block and returns the suffix of its statements starting with the current
|
||||
// node, along with the number of call expressions encountered.
|
||||
func restOfBlock(stack []ast.Node) ([]ast.Stmt, int) {
|
||||
var ncalls int
|
||||
for i := len(stack) - 1; i >= 0; i-- {
|
||||
if b, ok := stack[i].(*ast.BlockStmt); ok {
|
||||
for j, v := range b.List {
|
||||
if v == stack[i+1] {
|
||||
return b.List[j:]
|
||||
return b.List[j:], ncalls
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if _, ok := stack[i].(*ast.CallExpr); ok {
|
||||
ncalls++
|
||||
}
|
||||
}
|
||||
return nil
|
||||
return nil, 0
|
||||
}
|
||||
|
||||
// rootIdent finds the root identifier x in a chain of selections x.y.z, or nil if not found.
|
||||
|
|
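A minimal sketch of the two statement shapes involved in the #52661 change above; checkError is a hypothetical wrapper, not part of the analyzer:

package main

import "net/http"

// checkError is a hypothetical helper; wrapping the HTTP call inside another
// call is exactly what the new ncalls counter detects and skips.
func checkError(resp *http.Response, err error) (*http.Response, error) {
	return resp, err
}

func direct(url string) {
	resp, err := http.Get(url)
	defer resp.Body.Close() // reported: resp is used before err is checked
	if err != nil {
		return
	}
}

func wrapped(url string) {
	resp, err := checkError(http.Get(url))
	defer resp.Body.Close() // no longer reported: the call is wrapped (ncalls > 1)
	if err != nil {
		return
	}
}

func main() {
	direct("https://example.com")
	wrapped("https://example.com")
}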
15
vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go
generated
vendored
15
vendor/golang.org/x/tools/go/analysis/passes/inspect/inspect.go
generated
vendored
|
@ -19,14 +19,13 @@
|
|||
// Requires: []*analysis.Analyzer{inspect.Analyzer},
|
||||
// }
|
||||
//
|
||||
// func run(pass *analysis.Pass) (interface{}, error) {
|
||||
// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
// inspect.Preorder(nil, func(n ast.Node) {
|
||||
// ...
|
||||
// })
|
||||
// return nil
|
||||
// }
|
||||
//
|
||||
// func run(pass *analysis.Pass) (interface{}, error) {
|
||||
// inspect := pass.ResultOf[inspect.Analyzer].(*inspector.Inspector)
|
||||
// inspect.Preorder(nil, func(n ast.Node) {
|
||||
// ...
|
||||
// })
|
||||
// return nil
|
||||
// }
|
||||
package inspect
|
||||
|
||||
import (
|
||||
|
|
6
vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go
generated
vendored
6
vendor/golang.org/x/tools/go/analysis/passes/loopclosure/loopclosure.go
generated
vendored
|
@ -128,9 +128,9 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
// (but not awaited) in another goroutine as a consequence of the call.
|
||||
// For example, given the g.Go call below, it returns the function literal expression.
|
||||
//
|
||||
// import "sync/errgroup"
|
||||
// var g errgroup.Group
|
||||
// g.Go(func() error { ... })
|
||||
// import "sync/errgroup"
|
||||
// var g errgroup.Group
|
||||
// g.Go(func() error { ... })
|
||||
//
|
||||
// Currently only "golang.org/x/sync/errgroup.Group()" is considered.
|
||||
func goInvokes(info *types.Info, call *ast.CallExpr) ast.Expr {
|
||||
|
|
9
vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go
generated
vendored
9
vendor/golang.org/x/tools/go/analysis/passes/nilness/nilness.go
generated
vendored
|
@ -62,6 +62,7 @@ var Analyzer = &analysis.Analyzer{
|
|||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
|
||||
// TODO(48525): ssainput.SrcFuncs is missing fn._Instances(). runFunc will be skipped.
|
||||
for _, fn := range ssainput.SrcFuncs {
|
||||
runFunc(pass, fn)
|
||||
}
|
||||
|
@ -250,7 +251,7 @@ func (n nilness) String() string { return nilnessStrings[n+1] }
|
|||
// or unknown given the dominating stack of facts.
|
||||
func nilnessOf(stack []fact, v ssa.Value) nilness {
|
||||
switch v := v.(type) {
|
||||
// unwrap ChangeInterface values recursively, to detect if underlying
|
||||
// unwrap ChangeInterface and Slice values recursively, to detect if underlying
|
||||
// values have any facts recorded or are otherwise known with regard to nilness.
|
||||
//
|
||||
// This work must be in addition to expanding facts about
|
||||
|
@ -264,6 +265,10 @@ func nilnessOf(stack []fact, v ssa.Value) nilness {
|
|||
if underlying := nilnessOf(stack, v.X); underlying != unknown {
|
||||
return underlying
|
||||
}
|
||||
case *ssa.Slice:
|
||||
if underlying := nilnessOf(stack, v.X); underlying != unknown {
|
||||
return underlying
|
||||
}
|
||||
case *ssa.SliceToArrayPointer:
|
||||
nn := nilnessOf(stack, v.X)
|
||||
if slice2ArrayPtrLen(v) > 0 {
|
||||
|
@ -342,7 +347,7 @@ func eq(b *ssa.BasicBlock) (op *ssa.BinOp, tsucc, fsucc *ssa.BasicBlock) {
|
|||
// ChangeInterface, have transitive nilness, such that if you know the
|
||||
// underlying value is nil, you also know the value itself is nil, and vice
|
||||
// versa. This operation allows callers to match on any of the related values
|
||||
// in analyses, rather than just the one form of the value that happend to
|
||||
// in analyses, rather than just the one form of the value that happened to
|
||||
// appear in a comparison.
|
||||
//
|
||||
// This work must be in addition to unwrapping values within nilnessOf because
|
||||
|
|
8
vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go
generated
vendored
8
vendor/golang.org/x/tools/go/analysis/passes/pkgfact/pkgfact.go
generated
vendored
|
@ -10,14 +10,14 @@
|
|||
// Each key/value pair comes from a top-level constant declaration
|
||||
// whose name starts and ends with "_". For example:
|
||||
//
|
||||
// package p
|
||||
// package p
|
||||
//
|
||||
// const _greeting_ = "hello"
|
||||
// const _audience_ = "world"
|
||||
// const _greeting_ = "hello"
|
||||
// const _audience_ = "world"
|
||||
//
|
||||
// the pkgfact analysis output for package p would be:
|
||||
//
|
||||
// {"greeting": "hello", "audience": "world"}.
|
||||
// {"greeting": "hello", "audience": "world"}.
|
||||
//
|
||||
// In addition, the analysis reports a diagnostic at each import
|
||||
// showing which key/value pairs it contributes.
|
||||
|
|
7
vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go
generated
vendored
7
vendor/golang.org/x/tools/go/analysis/passes/printf/printf.go
generated
vendored
|
@ -342,7 +342,6 @@ func checkPrintfFwd(pass *analysis.Pass, w *printfWrapper, call *ast.CallExpr, k
|
|||
// not do so with gccgo, and nor do some other build systems.
|
||||
// TODO(adonovan): eliminate the redundant facts once this restriction
|
||||
// is lifted.
|
||||
//
|
||||
var isPrint = stringSet{
|
||||
"fmt.Errorf": true,
|
||||
"fmt.Fprint": true,
|
||||
|
@ -931,9 +930,9 @@ func okPrintfArg(pass *analysis.Pass, call *ast.CallExpr, state *formatState) (o
|
|||
// recursiveStringer reports whether the argument e is a potential
|
||||
// recursive call to stringer or is an error, such as t and &t in these examples:
|
||||
//
|
||||
// func (t *T) String() string { printf("%s", t) }
|
||||
// func (t T) Error() string { printf("%s", t) }
|
||||
// func (t T) String() string { printf("%s", &t) }
|
||||
// func (t *T) String() string { printf("%s", t) }
|
||||
// func (t T) Error() string { printf("%s", t) }
|
||||
// func (t T) String() string { printf("%s", &t) }
|
||||
func recursiveStringer(pass *analysis.Pass, e ast.Expr) (string, bool) {
|
||||
typ := pass.TypesInfo.Types[e].Type
|
||||
|
||||
|
|
1
vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go
generated
vendored
1
vendor/golang.org/x/tools/go/analysis/passes/shadow/shadow.go
generated
vendored
|
@ -120,7 +120,6 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
// the block, we should complain about it but don't.
|
||||
// - A variable declared inside a function literal can falsely be identified
|
||||
// as shadowing a variable in the outer function.
|
||||
//
|
||||
type span struct {
|
||||
min token.Pos
|
||||
max token.Pos
|
||||
|
|
2
vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go
generated
vendored
2
vendor/golang.org/x/tools/go/analysis/passes/sigchanyzer/sigchanyzer.go
generated
vendored
|
@ -50,7 +50,7 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
}
|
||||
case *ast.CallExpr:
|
||||
// Only signal.Notify(make(chan os.Signal), os.Interrupt) is safe,
|
||||
// conservatively treate others as not safe, see golang/go#45043
|
||||
// conservatively treat others as not safe, see golang/go#45043
|
||||
if isBuiltinMake(pass.TypesInfo, arg) {
|
||||
return
|
||||
}
|
||||
|
|
234
vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
generated
vendored
234
vendor/golang.org/x/tools/go/analysis/passes/tests/tests.go
generated
vendored
|
@ -7,6 +7,7 @@
|
|||
package tests
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"go/ast"
|
||||
"go/token"
|
||||
"go/types"
|
||||
|
@ -16,6 +17,7 @@ import (
|
|||
"unicode/utf8"
|
||||
|
||||
"golang.org/x/tools/go/analysis"
|
||||
"golang.org/x/tools/internal/analysisinternal"
|
||||
"golang.org/x/tools/internal/typeparams"
|
||||
)
|
||||
|
||||
|
@ -34,6 +36,24 @@ var Analyzer = &analysis.Analyzer{
|
|||
Run: run,
|
||||
}
|
||||
|
||||
var acceptedFuzzTypes = []types.Type{
|
||||
types.Typ[types.String],
|
||||
types.Typ[types.Bool],
|
||||
types.Typ[types.Float32],
|
||||
types.Typ[types.Float64],
|
||||
types.Typ[types.Int],
|
||||
types.Typ[types.Int8],
|
||||
types.Typ[types.Int16],
|
||||
types.Typ[types.Int32],
|
||||
types.Typ[types.Int64],
|
||||
types.Typ[types.Uint],
|
||||
types.Typ[types.Uint8],
|
||||
types.Typ[types.Uint16],
|
||||
types.Typ[types.Uint32],
|
||||
types.Typ[types.Uint64],
|
||||
types.NewSlice(types.Universe.Lookup("byte").Type()),
|
||||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
for _, f := range pass.Files {
|
||||
if !strings.HasSuffix(pass.Fset.File(f.Pos()).Name(), "_test.go") {
|
||||
|
@ -54,11 +74,223 @@ func run(pass *analysis.Pass) (interface{}, error) {
|
|||
case strings.HasPrefix(fn.Name.Name, "Benchmark"):
|
||||
checkTest(pass, fn, "Benchmark")
|
||||
}
|
||||
// run fuzz tests diagnostics only for 1.18 i.e. when analysisinternal.DiagnoseFuzzTests is turned on.
|
||||
if strings.HasPrefix(fn.Name.Name, "Fuzz") && analysisinternal.DiagnoseFuzzTests {
|
||||
checkTest(pass, fn, "Fuzz")
|
||||
checkFuzz(pass, fn)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
// checkFuzz checks the contents of a fuzz function.
|
||||
func checkFuzz(pass *analysis.Pass, fn *ast.FuncDecl) {
|
||||
params := checkFuzzCall(pass, fn)
|
||||
if params != nil {
|
||||
checkAddCalls(pass, fn, params)
|
||||
}
|
||||
}
|
||||
|
||||
// checkFuzzCall checks the arguments of f.Fuzz() calls:
|
||||
//
|
||||
// 1. f.Fuzz() should call a function and it should be of type (*testing.F).Fuzz().
|
||||
// 2. The called function in f.Fuzz(func(){}) should not return result.
|
||||
// 3. First argument of func() should be of type *testing.T
|
||||
// 4. Second argument onwards should be of type []byte, string, bool, byte,
|
||||
// rune, float32, float64, int, int8, int16, int32, int64, uint, uint8, uint16,
|
||||
// uint32, uint64
|
||||
// 5. func() must not call any *F methods, e.g. (*F).Log, (*F).Error, (*F).Skip
|
||||
// The only *F methods that are allowed in the (*F).Fuzz function are (*F).Failed and (*F).Name.
|
||||
//
|
||||
// Returns the list of parameters to the fuzz function, if they are valid fuzz parameters.
|
||||
func checkFuzzCall(pass *analysis.Pass, fn *ast.FuncDecl) (params *types.Tuple) {
|
||||
ast.Inspect(fn, func(n ast.Node) bool {
|
||||
call, ok := n.(*ast.CallExpr)
|
||||
if ok {
|
||||
if !isFuzzTargetDotFuzz(pass, call) {
|
||||
return true
|
||||
}
|
||||
|
||||
// Only one argument (func) must be passed to (*testing.F).Fuzz.
|
||||
if len(call.Args) != 1 {
|
||||
return true
|
||||
}
|
||||
expr := call.Args[0]
|
||||
if pass.TypesInfo.Types[expr].Type == nil {
|
||||
return true
|
||||
}
|
||||
t := pass.TypesInfo.Types[expr].Type.Underlying()
|
||||
tSign, argOk := t.(*types.Signature)
|
||||
// Argument should be a function
|
||||
if !argOk {
|
||||
pass.ReportRangef(expr, "argument to Fuzz must be a function")
|
||||
return false
|
||||
}
|
||||
// ff Argument function should not return
|
||||
if tSign.Results().Len() != 0 {
|
||||
pass.ReportRangef(expr, "fuzz target must not return any value")
|
||||
}
|
||||
// ff Argument function should have 1 or more argument
|
||||
if tSign.Params().Len() == 0 {
|
||||
pass.ReportRangef(expr, "fuzz target must have 1 or more argument")
|
||||
return false
|
||||
}
|
||||
ok := validateFuzzArgs(pass, tSign.Params(), expr)
|
||||
if ok && params == nil {
|
||||
params = tSign.Params()
|
||||
}
|
||||
// Inspect the function that was passed as an argument to make sure that
|
||||
// there are no calls to *F methods, except for Name and Failed.
|
||||
ast.Inspect(expr, func(n ast.Node) bool {
|
||||
if call, ok := n.(*ast.CallExpr); ok {
|
||||
if !isFuzzTargetDot(pass, call, "") {
|
||||
return true
|
||||
}
|
||||
if !isFuzzTargetDot(pass, call, "Name") && !isFuzzTargetDot(pass, call, "Failed") {
|
||||
pass.ReportRangef(call, "fuzz target must not call any *F methods")
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
// We do not need to look at any calls to f.Fuzz inside of a Fuzz call,
|
||||
// since they are not allowed.
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
return params
|
||||
}
|
||||
|
||||
// checkAddCalls checks that the arguments of f.Add calls have the same number and type of arguments as
|
||||
// the signature of the function passed to (*testing.F).Fuzz
|
||||
func checkAddCalls(pass *analysis.Pass, fn *ast.FuncDecl, params *types.Tuple) {
|
||||
ast.Inspect(fn, func(n ast.Node) bool {
|
||||
call, ok := n.(*ast.CallExpr)
|
||||
if ok {
|
||||
if !isFuzzTargetDotAdd(pass, call) {
|
||||
return true
|
||||
}
|
||||
|
||||
// The first argument to function passed to (*testing.F).Fuzz is (*testing.T).
|
||||
if len(call.Args) != params.Len()-1 {
|
||||
pass.ReportRangef(call, "wrong number of values in call to (*testing.F).Add: %d, fuzz target expects %d", len(call.Args), params.Len()-1)
|
||||
return true
|
||||
}
|
||||
var mismatched []int
|
||||
for i, expr := range call.Args {
|
||||
if pass.TypesInfo.Types[expr].Type == nil {
|
||||
return true
|
||||
}
|
||||
t := pass.TypesInfo.Types[expr].Type
|
||||
if !types.Identical(t, params.At(i+1).Type()) {
|
||||
mismatched = append(mismatched, i)
|
||||
}
|
||||
}
|
||||
// If just one of the types is mismatched report for that
|
||||
// type only. Otherwise report for the whole call to (*testing.F).Add
|
||||
if len(mismatched) == 1 {
|
||||
i := mismatched[0]
|
||||
expr := call.Args[i]
|
||||
t := pass.TypesInfo.Types[expr].Type
|
||||
pass.ReportRangef(expr, fmt.Sprintf("mismatched type in call to (*testing.F).Add: %v, fuzz target expects %v", t, params.At(i+1).Type()))
|
||||
} else if len(mismatched) > 1 {
|
||||
var gotArgs, wantArgs []types.Type
|
||||
for i := 0; i < len(call.Args); i++ {
|
||||
gotArgs, wantArgs = append(gotArgs, pass.TypesInfo.Types[call.Args[i]].Type), append(wantArgs, params.At(i+1).Type())
|
||||
}
|
||||
pass.ReportRangef(call, fmt.Sprintf("mismatched types in call to (*testing.F).Add: %v, fuzz target expects %v", gotArgs, wantArgs))
|
||||
}
|
||||
}
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
// isFuzzTargetDotFuzz reports whether call is (*testing.F).Fuzz().
|
||||
func isFuzzTargetDotFuzz(pass *analysis.Pass, call *ast.CallExpr) bool {
|
||||
return isFuzzTargetDot(pass, call, "Fuzz")
|
||||
}
|
||||
|
||||
// isFuzzTargetDotAdd reports whether call is (*testing.F).Add().
|
||||
func isFuzzTargetDotAdd(pass *analysis.Pass, call *ast.CallExpr) bool {
|
||||
return isFuzzTargetDot(pass, call, "Add")
|
||||
}
|
||||
|
||||
// isFuzzTargetDot reports whether call is (*testing.F).<name>().
|
||||
func isFuzzTargetDot(pass *analysis.Pass, call *ast.CallExpr, name string) bool {
|
||||
if selExpr, ok := call.Fun.(*ast.SelectorExpr); ok {
|
||||
if !isTestingType(pass.TypesInfo.Types[selExpr.X].Type, "F") {
|
||||
return false
|
||||
}
|
||||
if name == "" || selExpr.Sel.Name == name {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// Validate the arguments of fuzz target.
|
||||
func validateFuzzArgs(pass *analysis.Pass, params *types.Tuple, expr ast.Expr) bool {
|
||||
fLit, isFuncLit := expr.(*ast.FuncLit)
|
||||
exprRange := expr
|
||||
ok := true
|
||||
if !isTestingType(params.At(0).Type(), "T") {
|
||||
if isFuncLit {
|
||||
exprRange = fLit.Type.Params.List[0].Type
|
||||
}
|
||||
pass.ReportRangef(exprRange, "the first parameter of a fuzz target must be *testing.T")
|
||||
ok = false
|
||||
}
|
||||
for i := 1; i < params.Len(); i++ {
|
||||
if !isAcceptedFuzzType(params.At(i).Type()) {
|
||||
if isFuncLit {
|
||||
curr := 0
|
||||
for _, field := range fLit.Type.Params.List {
|
||||
curr += len(field.Names)
|
||||
if i < curr {
|
||||
exprRange = field.Type
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
pass.ReportRangef(exprRange, "fuzzing arguments can only have the following types: "+formatAcceptedFuzzType())
|
||||
ok = false
|
||||
}
|
||||
}
|
||||
return ok
|
||||
}
|
||||
|
||||
func isTestingType(typ types.Type, testingType string) bool {
|
||||
ptr, ok := typ.(*types.Pointer)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
named, ok := ptr.Elem().(*types.Named)
|
||||
if !ok {
|
||||
return false
|
||||
}
|
||||
return named.Obj().Pkg().Path() == "testing" && named.Obj().Name() == testingType
|
||||
}
|
||||
|
||||
// Validate that fuzz target function's arguments are of accepted types.
|
||||
func isAcceptedFuzzType(paramType types.Type) bool {
|
||||
for _, typ := range acceptedFuzzTypes {
|
||||
if types.Identical(typ, paramType) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func formatAcceptedFuzzType() string {
|
||||
var acceptedFuzzTypesStrings []string
|
||||
for _, typ := range acceptedFuzzTypes {
|
||||
acceptedFuzzTypesStrings = append(acceptedFuzzTypesStrings, typ.String())
|
||||
}
|
||||
acceptedFuzzTypesMsg := strings.Join(acceptedFuzzTypesStrings, ", ")
|
||||
return acceptedFuzzTypesMsg
|
||||
}
|
||||
|
||||
func isExampleSuffix(s string) bool {
|
||||
r, size := utf8.DecodeRuneInString(s)
|
||||
return size > 0 && unicode.IsLower(r)
|
||||
|
@ -243,10 +475,12 @@ func checkTest(pass *analysis.Pass, fn *ast.FuncDecl, prefix string) {
|
|||
if tparams := typeparams.ForFuncType(fn.Type); tparams != nil && len(tparams.List) > 0 {
|
||||
// Note: cmd/go/internal/load also errors about TestXXX and BenchmarkXXX functions with type parameters.
|
||||
// We have currently decided to also warn before compilation/package loading. This can help users in IDEs.
|
||||
// TODO(adonovan): use ReportRangef(tparams).
|
||||
pass.Reportf(fn.Pos(), "%s has type parameters: it will not be run by go test as a %sXXX function", fn.Name.Name, prefix)
|
||||
}
|
||||
|
||||
if !isTestSuffix(fn.Name.Name[len(prefix):]) {
|
||||
// TODO(adonovan): use ReportRangef(fn.Name).
|
||||
pass.Reportf(fn.Pos(), "%s has malformed name: first letter after '%s' must not be lowercase", fn.Name.Name, prefix)
|
||||
}
|
||||
}
|
||||
|
|
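A minimal sketch of a fuzz test that satisfies the rules enforced by the checkFuzz and checkAddCalls code added above; the names and logic are illustrative:

package demo

import "testing"

func FuzzParse(f *testing.F) {
	// Seed values passed to Add must match the fuzz target's
	// parameters after the leading *testing.T (here: string, int).
	f.Add("hello", 3)

	f.Fuzz(func(t *testing.T, s string, n int) {
		// Only *testing.T methods are used here; calling *F methods
		// other than Name or Failed inside the target is reported.
		if n < 0 || n > len(s) {
			t.Skip("out of range")
		}
		_ = s[:n]
	})
}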
53
vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
generated
vendored
53
vendor/golang.org/x/tools/go/analysis/passes/unusedwrite/unusedwrite.go
generated
vendored
|
@ -41,7 +41,7 @@ Another example is about non-pointer receiver:
|
|||
`
|
||||
|
||||
// Analyzer reports instances of writes to struct fields and arrays
|
||||
//that are never read.
|
||||
// that are never read.
|
||||
var Analyzer = &analysis.Analyzer{
|
||||
Name: "unusedwrite",
|
||||
Doc: Doc,
|
||||
|
@ -50,40 +50,49 @@ var Analyzer = &analysis.Analyzer{
|
|||
}
|
||||
|
||||
func run(pass *analysis.Pass) (interface{}, error) {
|
||||
// Check the writes to struct and array objects.
|
||||
checkStore := func(store *ssa.Store) {
|
||||
// Consider field/index writes to an object whose elements are copied and not shared.
|
||||
// MapUpdate is excluded since only the reference of the map is copied.
|
||||
switch addr := store.Addr.(type) {
|
||||
case *ssa.FieldAddr:
|
||||
if isDeadStore(store, addr.X, addr) {
|
||||
// Report the bug.
|
||||
ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
|
||||
for _, fn := range ssainput.SrcFuncs {
|
||||
// TODO(taking): Iterate over fn._Instantiations() once exported. If so, have 1 report per Pos().
|
||||
reports := checkStores(fn)
|
||||
for _, store := range reports {
|
||||
switch addr := store.Addr.(type) {
|
||||
case *ssa.FieldAddr:
|
||||
pass.Reportf(store.Pos(),
|
||||
"unused write to field %s",
|
||||
getFieldName(addr.X.Type(), addr.Field))
|
||||
}
|
||||
case *ssa.IndexAddr:
|
||||
if isDeadStore(store, addr.X, addr) {
|
||||
// Report the bug.
|
||||
case *ssa.IndexAddr:
|
||||
pass.Reportf(store.Pos(),
|
||||
"unused write to array index %s", addr.Index)
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
}
|
||||
|
||||
ssainput := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
|
||||
for _, fn := range ssainput.SrcFuncs {
|
||||
// Visit each block. No need to visit fn.Recover.
|
||||
for _, blk := range fn.Blocks {
|
||||
for _, instr := range blk.Instrs {
|
||||
// Identify writes.
|
||||
if store, ok := instr.(*ssa.Store); ok {
|
||||
checkStore(store)
|
||||
// checkStores returns *Stores in fn whose address is written to but never used.
|
||||
func checkStores(fn *ssa.Function) []*ssa.Store {
|
||||
var reports []*ssa.Store
|
||||
// Visit each block. No need to visit fn.Recover.
|
||||
for _, blk := range fn.Blocks {
|
||||
for _, instr := range blk.Instrs {
|
||||
// Identify writes.
|
||||
if store, ok := instr.(*ssa.Store); ok {
|
||||
// Consider field/index writes to an object whose elements are copied and not shared.
|
||||
// MapUpdate is excluded since only the reference of the map is copied.
|
||||
switch addr := store.Addr.(type) {
|
||||
case *ssa.FieldAddr:
|
||||
if isDeadStore(store, addr.X, addr) {
|
||||
reports = append(reports, store)
|
||||
}
|
||||
case *ssa.IndexAddr:
|
||||
if isDeadStore(store, addr.X, addr) {
|
||||
reports = append(reports, store)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil
|
||||
return reports
|
||||
}
|
||||
|
||||
// isDeadStore determines whether a field/index write to an object is dead.
|
||||
|
|
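A minimal sketch of the pattern the unusedwrite analyzer reports, for context on what checkStores collects; the type is illustrative:

package demo

type point struct{ x, y int }

func frob(p point) int { // p is a copy, so writes to it cannot be observed elsewhere
	p.x = 1 // would be reported as an unused write to field x: never read afterwards
	return p.y
}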
5
vendor/golang.org/x/tools/go/analysis/validate.go
generated
vendored
5
vendor/golang.org/x/tools/go/analysis/validate.go
generated
vendored
|
@ -14,6 +14,8 @@ import (
|
|||
// Validate reports an error if any of the analyzers are misconfigured.
|
||||
// Checks include:
|
||||
// that the name is a valid identifier;
|
||||
// that the Doc is not empty;
|
||||
// that the Run is non-nil;
|
||||
// that the Requires graph is acyclic;
|
||||
// that analyzer fact types are unique;
|
||||
// that each fact type is a pointer.
|
||||
|
@ -46,6 +48,9 @@ func Validate(analyzers []*Analyzer) error {
|
|||
return fmt.Errorf("analyzer %q is undocumented", a)
|
||||
}
|
||||
|
||||
if a.Run == nil {
|
||||
return fmt.Errorf("analyzer %q has nil Run", a)
|
||||
}
|
||||
// fact types
|
||||
for _, f := range a.FactTypes {
|
||||
if f == nil {
|
||||
|
|
29
vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
29
vendor/golang.org/x/tools/go/ast/astutil/enclosing.go
generated
vendored
|
@ -22,9 +22,9 @@ import (
|
|||
// additional whitespace abutting a node to be enclosed by it.
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <-A->
|
||||
// <----B----->
|
||||
// z := x + y // add them
|
||||
// <-A->
|
||||
// <----B----->
|
||||
//
|
||||
// the ast.BinaryExpr(+) node is considered to enclose interval B
|
||||
// even though its [Pos()..End()) is actually only interval A.
|
||||
|
@ -43,10 +43,10 @@ import (
|
|||
// interior whitespace of path[0].
|
||||
// In this example:
|
||||
//
|
||||
// z := x + y // add them
|
||||
// <--C--> <---E-->
|
||||
// ^
|
||||
// D
|
||||
// z := x + y // add them
|
||||
// <--C--> <---E-->
|
||||
// ^
|
||||
// D
|
||||
//
|
||||
// intervals C, D and E are inexact. C is contained by the
|
||||
// z-assignment statement, because it spans three of its children (:=,
|
||||
|
@ -54,12 +54,11 @@ import (
|
|||
// interior whitespace of the assignment. E is considered interior
|
||||
// whitespace of the BlockStmt containing the assignment.
|
||||
//
|
||||
// Precondition: [start, end) both lie within the same file as root.
|
||||
// TODO(adonovan): return (nil, false) in this case and remove precond.
|
||||
// Requires FileSet; see loader.tokenFileContainsPos.
|
||||
//
|
||||
// Postcondition: path is never nil; it always contains at least 'root'.
|
||||
//
|
||||
// The resulting path is never empty; it always contains at least the
|
||||
// 'root' *ast.File. Ideally PathEnclosingInterval would reject
|
||||
// intervals that lie wholly or partially outside the range of the
|
||||
// file, but unfortunately ast.File records only the token.Pos of
|
||||
// the 'package' keyword, but not of the start of the file itself.
|
||||
func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) {
|
||||
// fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging
|
||||
|
||||
|
@ -135,6 +134,7 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
|
|||
return false // inexact: overlaps multiple children
|
||||
}
|
||||
|
||||
// Ensure [start,end) is nondecreasing.
|
||||
if start > end {
|
||||
start, end = end, start
|
||||
}
|
||||
|
@ -162,7 +162,6 @@ func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Nod
|
|||
// tokenNode is a dummy implementation of ast.Node for a single token.
|
||||
// They are used transiently by PathEnclosingInterval but never escape
|
||||
// this package.
|
||||
//
|
||||
type tokenNode struct {
|
||||
pos token.Pos
|
||||
end token.Pos
|
||||
|
@ -183,7 +182,6 @@ func tok(pos token.Pos, len int) ast.Node {
|
|||
// childrenOf returns the direct non-nil children of ast.Node n.
|
||||
// It may include fake ast.Node implementations for bare tokens.
|
||||
// it is not safe to call (e.g.) ast.Walk on such nodes.
|
||||
//
|
||||
func childrenOf(n ast.Node) []ast.Node {
|
||||
var children []ast.Node
|
||||
|
||||
|
@ -488,7 +486,6 @@ func (sl byPos) Swap(i, j int) {
|
|||
// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident,
|
||||
// StarExpr) we could be much more specific given the path to the AST
|
||||
// root. Perhaps we should do that.
|
||||
//
|
||||
func NodeDescription(n ast.Node) string {
|
||||
switch n := n.(type) {
|
||||
case *ast.ArrayType:
|
||||
|
|
7
vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
7
vendor/golang.org/x/tools/go/ast/astutil/imports.go
generated
vendored
|
@ -22,8 +22,11 @@ func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) {
|
|||
// If name is not empty, it is used to rename the import.
|
||||
//
|
||||
// For example, calling
|
||||
//
|
||||
// AddNamedImport(fset, f, "pathpkg", "path")
|
||||
//
|
||||
// adds
|
||||
//
|
||||
// import pathpkg "path"
|
||||
func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) {
|
||||
if imports(f, name, path) {
|
||||
|
@ -270,8 +273,8 @@ func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (del
|
|||
}
|
||||
if j > 0 {
|
||||
lastImpspec := gen.Specs[j-1].(*ast.ImportSpec)
|
||||
lastLine := fset.Position(lastImpspec.Path.ValuePos).Line
|
||||
line := fset.Position(impspec.Path.ValuePos).Line
|
||||
lastLine := fset.PositionFor(lastImpspec.Path.ValuePos, false).Line
|
||||
line := fset.PositionFor(impspec.Path.ValuePos, false).Line
|
||||
|
||||
// We deleted an entry but now there may be
|
||||
// a blank line-sized hole where the import was.
|
||||
|
|
11
vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
11
vendor/golang.org/x/tools/go/ast/astutil/rewrite.go
generated
vendored
|
@ -41,7 +41,6 @@ type ApplyFunc func(*Cursor) bool
|
|||
// Children are traversed in the order in which they appear in the
|
||||
// respective node's struct definition. A package's files are
|
||||
// traversed in the filenames' alphabetical order.
|
||||
//
|
||||
func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) {
|
||||
parent := &struct{ ast.Node }{root}
|
||||
defer func() {
|
||||
|
@ -65,8 +64,8 @@ var abort = new(int) // singleton, to signal termination of Apply
|
|||
// c.Parent(), and f is the field identifier with name c.Name(),
|
||||
// the following invariants hold:
|
||||
//
|
||||
// p.f == c.Node() if c.Index() < 0
|
||||
// p.f[c.Index()] == c.Node() if c.Index() >= 0
|
||||
// p.f == c.Node() if c.Index() < 0
|
||||
// p.f[c.Index()] == c.Node() if c.Index() >= 0
|
||||
//
|
||||
// The methods Replace, Delete, InsertBefore, and InsertAfter
|
||||
// can be used to change the AST without disrupting Apply.
|
||||
|
@ -294,6 +293,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
|
|||
a.apply(n, "Fields", nil, n.Fields)
|
||||
|
||||
case *ast.FuncType:
|
||||
if tparams := typeparams.ForFuncType(n); tparams != nil {
|
||||
a.apply(n, "TypeParams", nil, tparams)
|
||||
}
|
||||
a.apply(n, "Params", nil, n.Params)
|
||||
a.apply(n, "Results", nil, n.Results)
|
||||
|
||||
|
@ -406,6 +408,9 @@ func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.
|
|||
case *ast.TypeSpec:
|
||||
a.apply(n, "Doc", nil, n.Doc)
|
||||
a.apply(n, "Name", nil, n.Name)
|
||||
if tparams := typeparams.ForTypeSpec(n); tparams != nil {
|
||||
a.apply(n, "TypeParams", nil, tparams)
|
||||
}
|
||||
a.apply(n, "Type", nil, n.Type)
|
||||
a.apply(n, "Comment", nil, n.Comment)
|
||||
|
||||
|
|
15
vendor/golang.org/x/tools/go/ast/inspector/typeof.go
generated
vendored
15
vendor/golang.org/x/tools/go/ast/inspector/typeof.go
generated
vendored
|
@ -77,12 +77,14 @@ const (
|
|||
// typeOf returns a distinct single-bit value that represents the type of n.
|
||||
//
|
||||
// Various implementations were benchmarked with BenchmarkNewInspector:
|
||||
// GOGC=off
|
||||
// - type switch 4.9-5.5ms 2.1ms
|
||||
// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
|
||||
// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
|
||||
// - linear scan, unordered list 6.4ms 2.7ms
|
||||
// - hash table 6.5ms 3.1ms
|
||||
//
|
||||
// GOGC=off
|
||||
// - type switch 4.9-5.5ms 2.1ms
|
||||
// - binary search over a sorted list of types 5.5-5.9ms 2.5ms
|
||||
// - linear scan, frequency-ordered list 5.9-6.1ms 2.7ms
|
||||
// - linear scan, unordered list 6.4ms 2.7ms
|
||||
// - hash table 6.5ms 3.1ms
|
||||
//
|
||||
// A perfect hash seemed like overkill.
|
||||
//
|
||||
// The compiler's switch statement is the clear winner
|
||||
|
@ -90,7 +92,6 @@ const (
|
|||
// with constant conditions and good branch prediction.
|
||||
// (Sadly it is the most verbose in source code.)
|
||||
// Binary search suffered from poor branch prediction.
|
||||
//
|
||||
func typeOf(n ast.Node) uint64 {
|
||||
// Fast path: nearly half of all nodes are identifiers.
|
||||
if _, ok := n.(*ast.Ident); ok {
|
||||
|
|
11
vendor/golang.org/x/tools/go/buildutil/allpackages.go
generated
vendored
11
vendor/golang.org/x/tools/go/buildutil/allpackages.go
generated
vendored
|
@ -28,7 +28,6 @@ import (
|
|||
//
|
||||
// All I/O is done via the build.Context file system interface,
|
||||
// which must be concurrency-safe.
|
||||
//
|
||||
func AllPackages(ctxt *build.Context) []string {
|
||||
var list []string
|
||||
ForEachPackage(ctxt, func(pkg string, _ error) {
|
||||
|
@ -48,7 +47,6 @@ func AllPackages(ctxt *build.Context) []string {
|
|||
//
|
||||
// All I/O is done via the build.Context file system interface,
|
||||
// which must be concurrency-safe.
|
||||
//
|
||||
func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) {
|
||||
ch := make(chan item)
|
||||
|
||||
|
@ -127,19 +125,18 @@ func allPackages(ctxt *build.Context, root string, ch chan<- item) {
|
|||
// ExpandPatterns returns the set of packages matched by patterns,
|
||||
// which may have the following forms:
|
||||
//
|
||||
// golang.org/x/tools/cmd/guru # a single package
|
||||
// golang.org/x/tools/... # all packages beneath dir
|
||||
// ... # the entire workspace.
|
||||
// golang.org/x/tools/cmd/guru # a single package
|
||||
// golang.org/x/tools/... # all packages beneath dir
|
||||
// ... # the entire workspace.
|
||||
//
|
||||
// Order is significant: a pattern preceded by '-' removes matching
|
||||
// packages from the set. For example, these patterns match all encoding
|
||||
// packages except encoding/xml:
|
||||
//
|
||||
// encoding/... -encoding/xml
|
||||
// encoding/... -encoding/xml
|
||||
//
|
||||
// A trailing slash in a pattern is ignored. (Path components of Go
|
||||
// package names are separated by slash, not the platform's path separator.)
|
||||
//
|
||||
func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool {
|
||||
// TODO(adonovan): support other features of 'go list':
|
||||
// - "std"/"cmd"/"all" meta-packages
|
||||
|
|
1
vendor/golang.org/x/tools/go/buildutil/fakecontext.go
generated
vendored
1
vendor/golang.org/x/tools/go/buildutil/fakecontext.go
generated
vendored
|
@ -30,7 +30,6 @@ import (
|
|||
// /go/src/ including, for instance, "math" and "math/big".
|
||||
// ReadDir("/go/src/math/big") would return all the files in the
|
||||
// "math/big" package.
|
||||
//
|
||||
func FakeContext(pkgs map[string]map[string]string) *build.Context {
|
||||
clean := func(filename string) string {
|
||||
f := path.Clean(filepath.ToSlash(filename))
|
||||
|
|
3
vendor/golang.org/x/tools/go/buildutil/overlay.go
generated
vendored
3
vendor/golang.org/x/tools/go/buildutil/overlay.go
generated
vendored
|
@ -60,8 +60,7 @@ func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Conte
|
|||
// ParseOverlayArchive parses an archive containing Go files and their
|
||||
// contents. The result is intended to be used with OverlayContext.
|
||||
//
|
||||
//
|
||||
// Archive format
|
||||
// # Archive format
|
||||
//
|
||||
// The archive consists of a series of files. Each file consists of a
|
||||
// name, a decimal file size and the file contents, separated by
|
||||
|
|
3
vendor/golang.org/x/tools/go/buildutil/tags.go
generated
vendored
3
vendor/golang.org/x/tools/go/buildutil/tags.go
generated
vendored
|
@ -20,7 +20,8 @@ const TagsFlagDoc = "a list of `build tags` to consider satisfied during the bui
|
|||
// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag.
|
||||
//
|
||||
// Example:
|
||||
// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
|
||||
//
|
||||
// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc)
|
||||
type TagsFlag []string
|
||||
|
||||
func (v *TagsFlag) Set(s string) error {
|
||||
|
|
3
vendor/golang.org/x/tools/go/buildutil/util.go
generated
vendored
3
vendor/golang.org/x/tools/go/buildutil/util.go
generated
vendored
|
@ -28,7 +28,6 @@ import (
|
|||
// filename that will be attached to the ASTs.
|
||||
//
|
||||
// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws.
|
||||
//
|
||||
func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) {
|
||||
if !IsAbsPath(ctxt, file) {
|
||||
file = JoinPath(ctxt, dir, file)
|
||||
|
@ -51,7 +50,6 @@ func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string
|
|||
//
|
||||
// The '...Files []string' fields of the resulting build.Package are not
|
||||
// populated (build.FindOnly mode).
|
||||
//
|
||||
func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) {
|
||||
if !IsAbsPath(ctxt, filename) {
|
||||
filename = JoinPath(ctxt, dir, filename)
|
||||
|
@ -196,7 +194,6 @@ func SplitPathList(ctxt *build.Context, s string) []string {
|
|||
|
||||
// sameFile returns true if x and y have the same basename and denote
|
||||
// the same file.
|
||||
//
|
||||
func sameFile(x, y string) bool {
|
||||
if path.Clean(x) == path.Clean(y) {
|
||||
return true
|
||||
|
|
3
vendor/golang.org/x/tools/go/cfg/builder.go
generated
vendored
3
vendor/golang.org/x/tools/go/cfg/builder.go
generated
vendored
|
@ -443,7 +443,6 @@ func (b *builder) rangeStmt(s *ast.RangeStmt, label *lblock) {
|
|||
// Destinations associated with unlabeled for/switch/select stmts.
|
||||
// We push/pop one of these as we enter/leave each construct and for
|
||||
// each BranchStmt we scan for the innermost target of the right type.
|
||||
//
|
||||
type targets struct {
|
||||
tail *targets // rest of stack
|
||||
_break *Block
|
||||
|
@ -454,7 +453,6 @@ type targets struct {
|
|||
// Destinations associated with a labeled block.
|
||||
// We populate these as labels are encountered in forward gotos or
|
||||
// labeled statements.
|
||||
//
|
||||
type lblock struct {
|
||||
_goto *Block
|
||||
_break *Block
|
||||
|
@ -463,7 +461,6 @@ type lblock struct {
|
|||
|
||||
// labeledBlock returns the branch target associated with the
|
||||
// specified label, creating it if needed.
|
||||
//
|
||||
func (b *builder) labeledBlock(label *ast.Ident) *lblock {
|
||||
lb := b.lblocks[label.Obj]
|
||||
if lb == nil {
|
||||
|
|
17
vendor/golang.org/x/tools/go/cfg/cfg.go
generated
vendored
17
vendor/golang.org/x/tools/go/cfg/cfg.go
generated
vendored
|
@ -20,14 +20,14 @@
|
|||
//
|
||||
// produces this CFG:
|
||||
//
|
||||
// 1: x := f()
|
||||
// x != nil
|
||||
// succs: 2, 3
|
||||
// 2: T()
|
||||
// succs: 4
|
||||
// 3: F()
|
||||
// succs: 4
|
||||
// 4:
|
||||
// 1: x := f()
|
||||
// x != nil
|
||||
// succs: 2, 3
|
||||
// 2: T()
|
||||
// succs: 4
|
||||
// 3: F()
|
||||
// succs: 4
|
||||
// 4:
|
||||
//
|
||||
// The CFG does contain Return statements; even implicit returns are
|
||||
// materialized (at the position of the function's closing brace).
|
||||
|
@ -36,7 +36,6 @@
|
|||
// edges, nor the short-circuit semantics of the && and || operators,
|
||||
// nor abnormal control flow caused by panic. If you need this
|
||||
// information, use golang.org/x/tools/go/ssa instead.
|
||||
//
|
||||
package cfg
|
||||
|
||||
import (
|
||||
|
|
49 vendor/golang.org/x/tools/go/gcexportdata/gcexportdata.go generated vendored
|
@ -17,32 +17,47 @@
|
|||
// developer tools, which will then be able to consume both Go 1.7 and
|
||||
// Go 1.8 export data files, so they will work before and after the
|
||||
// Go update. (See discussion at https://golang.org/issue/15651.)
|
||||
//
|
||||
package gcexportdata // import "golang.org/x/tools/go/gcexportdata"
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"go/token"
|
||||
"go/types"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os/exec"
|
||||
|
||||
"golang.org/x/tools/go/internal/gcimporter"
|
||||
)
|
||||
|
||||
// Find returns the name of an object (.o) or archive (.a) file
|
||||
// containing type information for the specified import path,
|
||||
// using the workspace layout conventions of go/build.
|
||||
// using the go command.
|
||||
// If no file was found, an empty filename is returned.
|
||||
//
|
||||
// A relative srcDir is interpreted relative to the current working directory.
|
||||
//
|
||||
// Find also returns the package's resolved (canonical) import path,
|
||||
// reflecting the effects of srcDir and vendoring on importPath.
|
||||
//
|
||||
// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
|
||||
// which is more efficient.
|
||||
func Find(importPath, srcDir string) (filename, path string) {
|
||||
return gcimporter.FindPkg(importPath, srcDir)
|
||||
cmd := exec.Command("go", "list", "-json", "-export", "--", importPath)
|
||||
cmd.Dir = srcDir
|
||||
out, err := cmd.CombinedOutput()
|
||||
if err != nil {
|
||||
return "", ""
|
||||
}
|
||||
var data struct {
|
||||
ImportPath string
|
||||
Export string
|
||||
}
|
||||
json.Unmarshal(out, &data)
|
||||
return data.Export, data.ImportPath
|
||||
}
|
||||
|
||||
// NewReader returns a reader for the export data section of an object
|
||||
|
@ -101,13 +116,29 @@ func Read(in io.Reader, fset *token.FileSet, imports map[string]*types.Package,
|
|||
// The indexed export format starts with an 'i'; the older
|
||||
// binary export format starts with a 'c', 'd', or 'v'
|
||||
// (from "version"). Select appropriate importer.
|
||||
if len(data) > 0 && data[0] == 'i' {
|
||||
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
|
||||
return pkg, err
|
||||
}
|
||||
if len(data) > 0 {
|
||||
switch data[0] {
|
||||
case 'i':
|
||||
_, pkg, err := gcimporter.IImportData(fset, imports, data[1:], path)
|
||||
return pkg, err
|
||||
|
||||
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
|
||||
return pkg, err
|
||||
case 'v', 'c', 'd':
|
||||
_, pkg, err := gcimporter.BImportData(fset, imports, data, path)
|
||||
return pkg, err
|
||||
|
||||
case 'u':
|
||||
_, pkg, err := gcimporter.UImportData(fset, imports, data[1:], path)
|
||||
return pkg, err
|
||||
|
||||
default:
|
||||
l := len(data)
|
||||
if l > 10 {
|
||||
l = 10
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), path)
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("empty export data for %s", path)
|
||||
}
|
||||
|
||||
// Write writes encoded type information for the specified package to out.
|
||||
|
|
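The rewritten Read above dispatches on the first byte of the export data: 'i' selects the indexed importer, 'v', 'c' and 'd' the older binary importer, and 'u' the new unified IR importer. A minimal sketch of that dispatch, with a hypothetical helper name that is not part of the x/tools API:

```go
package main

import "fmt"

// describeExportFormat reports which gc export data format a payload uses,
// based on its first byte, mirroring the dispatch in gcexportdata.Read.
// This helper is illustrative only.
func describeExportFormat(data []byte) (string, error) {
	if len(data) == 0 {
		return "", fmt.Errorf("empty export data")
	}
	switch data[0] {
	case 'i':
		return "indexed export format", nil
	case 'v', 'c', 'd':
		return "older binary export format", nil
	case 'u':
		return "unified IR export format", nil
	default:
		return "", fmt.Errorf("unexpected export data prefix %q", data[0])
	}
}

func main() {
	format, err := describeExportFormat([]byte("u...payload..."))
	if err != nil {
		panic(err)
	}
	fmt.Println(format) // unified IR export format
}
```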
2 vendor/golang.org/x/tools/go/gcexportdata/importer.go generated vendored
|
@@ -23,6 +23,8 @@ import (
// or to control the FileSet or access the imports map populated during
// package loading.
//
// Deprecated: Use the higher-level API in golang.org/x/tools/go/packages,
// which is more efficient.
func NewImporter(fset *token.FileSet, imports map[string]*types.Package) types.ImporterFrom {
	return importer{fset, imports}
}
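Find and NewImporter are both deprecated here in favor of golang.org/x/tools/go/packages. A hedged sketch of the suggested replacement; the pattern ("fmt" below) and the exact Mode bits are placeholders to adjust to what you actually need:

```go
package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
)

func main() {
	cfg := &packages.Config{
		// Request names, imports, and type information for matched packages.
		Mode: packages.NeedName | packages.NeedImports | packages.NeedTypes,
	}
	pkgs, err := packages.Load(cfg, "fmt") // pass your own import path or pattern here
	if err != nil {
		log.Fatal(err)
	}
	for _, pkg := range pkgs {
		fmt.Println(pkg.PkgPath, "scope has", pkg.Types.Scope().Len(), "objects")
	}
}
```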
1 vendor/golang.org/x/tools/go/internal/cgo/cgo.go generated vendored
|
@@ -69,7 +69,6 @@ import (

// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses
// the output and returns the resulting ASTs.
//
func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) {
	tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C")
	if err != nil {
20 vendor/golang.org/x/tools/go/internal/gcimporter/bexport.go generated vendored
|
@ -35,16 +35,18 @@ import (
|
|||
const debugFormat = false // default: false
|
||||
|
||||
// Current export format version. Increase with each format change.
|
||||
//
|
||||
// Note: The latest binary (non-indexed) export format is at version 6.
|
||||
// This exporter is still at level 4, but it doesn't matter since
|
||||
// the binary importer can handle older versions just fine.
|
||||
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
|
||||
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMEMTED HERE
|
||||
// 4: type name objects support type aliases, uses aliasTag
|
||||
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
|
||||
// 2: removed unused bool in ODCL export (compiler only)
|
||||
// 1: header format change (more regular), export package for _ struct fields
|
||||
// 0: Go1.7 encoding
|
||||
// This exporter is still at level 4, but it doesn't matter since
|
||||
// the binary importer can handle older versions just fine.
|
||||
//
|
||||
// 6: package height (CL 105038) -- NOT IMPLEMENTED HERE
|
||||
// 5: improved position encoding efficiency (issue 20080, CL 41619) -- NOT IMPLEMENTED HERE
|
||||
// 4: type name objects support type aliases, uses aliasTag
|
||||
// 3: Go1.8 encoding (same as version 2, aliasTag defined but never used)
|
||||
// 2: removed unused bool in ODCL export (compiler only)
|
||||
// 1: header format change (more regular), export package for _ struct fields
|
||||
// 0: Go1.7 encoding
|
||||
const exportVersion = 4
|
||||
|
||||
// trackAllTypes enables cycle tracking for all types, not just named
|
||||
|
|
161 vendor/golang.org/x/tools/go/internal/gcimporter/gcimporter.go generated vendored
|
@ -45,7 +45,6 @@ var pkgExts = [...]string{".a", ".o"}
|
|||
// the build.Default build.Context). A relative srcDir is interpreted
|
||||
// relative to the current working directory.
|
||||
// If no file was found, an empty filename is returned.
|
||||
//
|
||||
func FindPkg(path, srcDir string) (filename, id string) {
|
||||
if path == "" {
|
||||
return
|
||||
|
@ -109,7 +108,6 @@ func FindPkg(path, srcDir string) (filename, id string) {
|
|||
// If packages[id] contains the completely imported package, that package
|
||||
// can be used directly, and there is no need to call this function (but
|
||||
// there is also no harm but for extra time used).
|
||||
//
|
||||
func ImportData(packages map[string]*types.Package, filename, id string, data io.Reader) (pkg *types.Package, err error) {
|
||||
// support for parser error handling
|
||||
defer func() {
|
||||
|
@ -133,7 +131,6 @@ func ImportData(packages map[string]*types.Package, filename, id string, data io
|
|||
// Import imports a gc-generated package given its import path and srcDir, adds
|
||||
// the corresponding package object to the packages map, and returns the object.
|
||||
// The packages map must contain all packages already imported.
|
||||
//
|
||||
func Import(packages map[string]*types.Package, path, srcDir string, lookup func(path string) (io.ReadCloser, error)) (pkg *types.Package, err error) {
|
||||
var rc io.ReadCloser
|
||||
var filename, id string
|
||||
|
@ -184,8 +181,9 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
|
|||
defer rc.Close()
|
||||
|
||||
var hdr string
|
||||
var size int64
|
||||
buf := bufio.NewReader(rc)
|
||||
if hdr, _, err = FindExportData(buf); err != nil {
|
||||
if hdr, size, err = FindExportData(buf); err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
|
@ -213,10 +211,27 @@ func Import(packages map[string]*types.Package, path, srcDir string, lookup func
|
|||
// The indexed export format starts with an 'i'; the older
|
||||
// binary export format starts with a 'c', 'd', or 'v'
|
||||
// (from "version"). Select appropriate importer.
|
||||
if len(data) > 0 && data[0] == 'i' {
|
||||
_, pkg, err = IImportData(fset, packages, data[1:], id)
|
||||
} else {
|
||||
_, pkg, err = BImportData(fset, packages, data, id)
|
||||
if len(data) > 0 {
|
||||
switch data[0] {
|
||||
case 'i':
|
||||
_, pkg, err := IImportData(fset, packages, data[1:], id)
|
||||
return pkg, err
|
||||
|
||||
case 'v', 'c', 'd':
|
||||
_, pkg, err := BImportData(fset, packages, data, id)
|
||||
return pkg, err
|
||||
|
||||
case 'u':
|
||||
_, pkg, err := UImportData(fset, packages, data[1:size], id)
|
||||
return pkg, err
|
||||
|
||||
default:
|
||||
l := len(data)
|
||||
if l > 10 {
|
||||
l = 10
|
||||
}
|
||||
return nil, fmt.Errorf("unexpected export data with prefix %q for path %s", string(data[:l]), id)
|
||||
}
|
||||
}
|
||||
|
||||
default:
|
||||
|
@ -348,8 +363,9 @@ func (p *parser) expectKeyword(keyword string) {
|
|||
// ----------------------------------------------------------------------------
|
||||
// Qualified and unqualified names
|
||||
|
||||
// PackageId = string_lit .
|
||||
// parsePackageID parses a PackageId:
|
||||
//
|
||||
// PackageId = string_lit .
|
||||
func (p *parser) parsePackageID() string {
|
||||
id, err := strconv.Unquote(p.expect(scanner.String))
|
||||
if err != nil {
|
||||
|
@ -363,13 +379,16 @@ func (p *parser) parsePackageID() string {
|
|||
return id
|
||||
}
|
||||
|
||||
// PackageName = ident .
|
||||
// parsePackageName parse a PackageName:
|
||||
//
|
||||
// PackageName = ident .
|
||||
func (p *parser) parsePackageName() string {
|
||||
return p.expect(scanner.Ident)
|
||||
}
|
||||
|
||||
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
|
||||
// parseDotIdent parses a dotIdentifier:
|
||||
//
|
||||
// dotIdentifier = ( ident | '·' ) { ident | int | '·' } .
|
||||
func (p *parser) parseDotIdent() string {
|
||||
ident := ""
|
||||
if p.tok != scanner.Int {
|
||||
|
@ -386,8 +405,9 @@ func (p *parser) parseDotIdent() string {
|
|||
return ident
|
||||
}
|
||||
|
||||
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
|
||||
// parseQualifiedName parses a QualifiedName:
|
||||
//
|
||||
// QualifiedName = "@" PackageId "." ( "?" | dotIdentifier ) .
|
||||
func (p *parser) parseQualifiedName() (id, name string) {
|
||||
p.expect('@')
|
||||
id = p.parsePackageID()
|
||||
|
@ -410,7 +430,6 @@ func (p *parser) parseQualifiedName() (id, name string) {
|
|||
// id identifies a package, usually by a canonical package path like
|
||||
// "encoding/json" but possibly by a non-canonical import path like
|
||||
// "./json".
|
||||
//
|
||||
func (p *parser) getPkg(id, name string) *types.Package {
|
||||
// package unsafe is not in the packages maps - handle explicitly
|
||||
if id == "unsafe" {
|
||||
|
@ -446,7 +465,6 @@ func (p *parser) getPkg(id, name string) *types.Package {
|
|||
|
||||
// parseExportedName is like parseQualifiedName, but
|
||||
// the package id is resolved to an imported *types.Package.
|
||||
//
|
||||
func (p *parser) parseExportedName() (pkg *types.Package, name string) {
|
||||
id, name := p.parseQualifiedName()
|
||||
pkg = p.getPkg(id, "")
|
||||
|
@ -456,8 +474,9 @@ func (p *parser) parseExportedName() (pkg *types.Package, name string) {
|
|||
// ----------------------------------------------------------------------------
|
||||
// Types
|
||||
|
||||
// BasicType = identifier .
|
||||
// parseBasicType parses a BasicType:
|
||||
//
|
||||
// BasicType = identifier .
|
||||
func (p *parser) parseBasicType() types.Type {
|
||||
id := p.expect(scanner.Ident)
|
||||
obj := types.Universe.Lookup(id)
|
||||
|
@ -468,8 +487,9 @@ func (p *parser) parseBasicType() types.Type {
|
|||
return nil
|
||||
}
|
||||
|
||||
// ArrayType = "[" int_lit "]" Type .
|
||||
// parseArrayType parses an ArrayType:
|
||||
//
|
||||
// ArrayType = "[" int_lit "]" Type .
|
||||
func (p *parser) parseArrayType(parent *types.Package) types.Type {
|
||||
// "[" already consumed and lookahead known not to be "]"
|
||||
lit := p.expect(scanner.Int)
|
||||
|
@ -482,8 +502,9 @@ func (p *parser) parseArrayType(parent *types.Package) types.Type {
|
|||
return types.NewArray(elem, n)
|
||||
}
|
||||
|
||||
// MapType = "map" "[" Type "]" Type .
|
||||
// parseMapType parses a MapType:
|
||||
//
|
||||
// MapType = "map" "[" Type "]" Type .
|
||||
func (p *parser) parseMapType(parent *types.Package) types.Type {
|
||||
p.expectKeyword("map")
|
||||
p.expect('[')
|
||||
|
@ -493,7 +514,9 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
|
|||
return types.NewMap(key, elem)
|
||||
}
|
||||
|
||||
// Name = identifier | "?" | QualifiedName .
|
||||
// parseName parses a Name:
|
||||
//
|
||||
// Name = identifier | "?" | QualifiedName .
|
||||
//
|
||||
// For unqualified and anonymous names, the returned package is the parent
|
||||
// package unless parent == nil, in which case the returned package is the
|
||||
|
@ -505,7 +528,6 @@ func (p *parser) parseMapType(parent *types.Package) types.Type {
|
|||
// it doesn't exist yet) unless materializePkg is set (which creates an
|
||||
// unnamed package with valid package path). In the latter case, a
|
||||
// subsequent import clause is expected to provide a name for the package.
|
||||
//
|
||||
func (p *parser) parseName(parent *types.Package, materializePkg bool) (pkg *types.Package, name string) {
|
||||
pkg = parent
|
||||
if pkg == nil {
|
||||
|
@ -539,8 +561,9 @@ func deref(typ types.Type) types.Type {
|
|||
return typ
|
||||
}
|
||||
|
||||
// Field = Name Type [ string_lit ] .
|
||||
// parseField parses a Field:
|
||||
//
|
||||
// Field = Name Type [ string_lit ] .
|
||||
func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
|
||||
pkg, name := p.parseName(parent, true)
|
||||
|
||||
|
@ -583,9 +606,10 @@ func (p *parser) parseField(parent *types.Package) (*types.Var, string) {
|
|||
return types.NewField(token.NoPos, pkg, name, typ, anonymous), tag
|
||||
}
|
||||
|
||||
// StructType = "struct" "{" [ FieldList ] "}" .
|
||||
// FieldList = Field { ";" Field } .
|
||||
// parseStructType parses a StructType:
|
||||
//
|
||||
// StructType = "struct" "{" [ FieldList ] "}" .
|
||||
// FieldList = Field { ";" Field } .
|
||||
func (p *parser) parseStructType(parent *types.Package) types.Type {
|
||||
var fields []*types.Var
|
||||
var tags []string
|
||||
|
@ -610,8 +634,9 @@ func (p *parser) parseStructType(parent *types.Package) types.Type {
|
|||
return types.NewStruct(fields, tags)
|
||||
}
|
||||
|
||||
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
|
||||
// parseParameter parses a Parameter:
|
||||
//
|
||||
// Parameter = ( identifier | "?" ) [ "..." ] Type [ string_lit ] .
|
||||
func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
|
||||
_, name := p.parseName(nil, false)
|
||||
// remove gc-specific parameter numbering
|
||||
|
@ -635,9 +660,10 @@ func (p *parser) parseParameter() (par *types.Var, isVariadic bool) {
|
|||
return
|
||||
}
|
||||
|
||||
// Parameters = "(" [ ParameterList ] ")" .
|
||||
// ParameterList = { Parameter "," } Parameter .
|
||||
// parseParameters parses a Parameters:
|
||||
//
|
||||
// Parameters = "(" [ ParameterList ] ")" .
|
||||
// ParameterList = { Parameter "," } Parameter .
|
||||
func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
|
||||
p.expect('(')
|
||||
for p.tok != ')' && p.tok != scanner.EOF {
|
||||
|
@ -658,9 +684,10 @@ func (p *parser) parseParameters() (list []*types.Var, isVariadic bool) {
|
|||
return
|
||||
}
|
||||
|
||||
// Signature = Parameters [ Result ] .
|
||||
// Result = Type | Parameters .
|
||||
// parseSignature parses a Signature:
|
||||
//
|
||||
// Signature = Parameters [ Result ] .
|
||||
// Result = Type | Parameters .
|
||||
func (p *parser) parseSignature(recv *types.Var) *types.Signature {
|
||||
params, isVariadic := p.parseParameters()
|
||||
|
||||
|
@ -677,14 +704,15 @@ func (p *parser) parseSignature(recv *types.Var) *types.Signature {
|
|||
return types.NewSignature(recv, types.NewTuple(params...), types.NewTuple(results...), isVariadic)
|
||||
}
|
||||
|
||||
// InterfaceType = "interface" "{" [ MethodList ] "}" .
|
||||
// MethodList = Method { ";" Method } .
|
||||
// Method = Name Signature .
|
||||
// parseInterfaceType parses an InterfaceType:
|
||||
//
|
||||
// InterfaceType = "interface" "{" [ MethodList ] "}" .
|
||||
// MethodList = Method { ";" Method } .
|
||||
// Method = Name Signature .
|
||||
//
|
||||
// The methods of embedded interfaces are always "inlined"
|
||||
// by the compiler and thus embedded interfaces are never
|
||||
// visible in the export data.
|
||||
//
|
||||
func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
|
||||
var methods []*types.Func
|
||||
|
||||
|
@ -705,8 +733,9 @@ func (p *parser) parseInterfaceType(parent *types.Package) types.Type {
|
|||
return newInterface(methods, nil).Complete()
|
||||
}
|
||||
|
||||
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
|
||||
// parseChanType parses a ChanType:
|
||||
//
|
||||
// ChanType = ( "chan" [ "<-" ] | "<-" "chan" ) Type .
|
||||
func (p *parser) parseChanType(parent *types.Package) types.Type {
|
||||
dir := types.SendRecv
|
||||
if p.tok == scanner.Ident {
|
||||
|
@ -724,17 +753,18 @@ func (p *parser) parseChanType(parent *types.Package) types.Type {
|
|||
return types.NewChan(dir, elem)
|
||||
}
|
||||
|
||||
// Type =
|
||||
// BasicType | TypeName | ArrayType | SliceType | StructType |
|
||||
// PointerType | FuncType | InterfaceType | MapType | ChanType |
|
||||
// "(" Type ")" .
|
||||
// parseType parses a Type:
|
||||
//
|
||||
// BasicType = ident .
|
||||
// TypeName = ExportedName .
|
||||
// SliceType = "[" "]" Type .
|
||||
// PointerType = "*" Type .
|
||||
// FuncType = "func" Signature .
|
||||
// Type =
|
||||
// BasicType | TypeName | ArrayType | SliceType | StructType |
|
||||
// PointerType | FuncType | InterfaceType | MapType | ChanType |
|
||||
// "(" Type ")" .
|
||||
//
|
||||
// BasicType = ident .
|
||||
// TypeName = ExportedName .
|
||||
// SliceType = "[" "]" Type .
|
||||
// PointerType = "*" Type .
|
||||
// FuncType = "func" Signature .
|
||||
func (p *parser) parseType(parent *types.Package) types.Type {
|
||||
switch p.tok {
|
||||
case scanner.Ident:
|
||||
|
@ -786,16 +816,18 @@ func (p *parser) parseType(parent *types.Package) types.Type {
|
|||
// ----------------------------------------------------------------------------
|
||||
// Declarations
|
||||
|
||||
// ImportDecl = "import" PackageName PackageId .
|
||||
// parseImportDecl parses an ImportDecl:
|
||||
//
|
||||
// ImportDecl = "import" PackageName PackageId .
|
||||
func (p *parser) parseImportDecl() {
|
||||
p.expectKeyword("import")
|
||||
name := p.parsePackageName()
|
||||
p.getPkg(p.parsePackageID(), name)
|
||||
}
|
||||
|
||||
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
|
||||
// parseInt parses an int_lit:
|
||||
//
|
||||
// int_lit = [ "+" | "-" ] { "0" ... "9" } .
|
||||
func (p *parser) parseInt() string {
|
||||
s := ""
|
||||
switch p.tok {
|
||||
|
@ -808,8 +840,9 @@ func (p *parser) parseInt() string {
|
|||
return s + p.expect(scanner.Int)
|
||||
}
|
||||
|
||||
// number = int_lit [ "p" int_lit ] .
|
||||
// parseNumber parses a number:
|
||||
//
|
||||
// number = int_lit [ "p" int_lit ] .
|
||||
func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
|
||||
// mantissa
|
||||
mant := constant.MakeFromLiteral(p.parseInt(), token.INT, 0)
|
||||
|
@ -844,13 +877,14 @@ func (p *parser) parseNumber() (typ *types.Basic, val constant.Value) {
|
|||
return
|
||||
}
|
||||
|
||||
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
|
||||
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
|
||||
// bool_lit = "true" | "false" .
|
||||
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
|
||||
// rune_lit = "(" int_lit "+" int_lit ")" .
|
||||
// string_lit = `"` { unicode_char } `"` .
|
||||
// parseConstDecl parses a ConstDecl:
|
||||
//
|
||||
// ConstDecl = "const" ExportedName [ Type ] "=" Literal .
|
||||
// Literal = bool_lit | int_lit | float_lit | complex_lit | rune_lit | string_lit .
|
||||
// bool_lit = "true" | "false" .
|
||||
// complex_lit = "(" float_lit "+" float_lit "i" ")" .
|
||||
// rune_lit = "(" int_lit "+" int_lit ")" .
|
||||
// string_lit = `"` { unicode_char } `"` .
|
||||
func (p *parser) parseConstDecl() {
|
||||
p.expectKeyword("const")
|
||||
pkg, name := p.parseExportedName()
|
||||
|
@ -920,8 +954,9 @@ func (p *parser) parseConstDecl() {
|
|||
pkg.Scope().Insert(types.NewConst(token.NoPos, pkg, name, typ0, val))
|
||||
}
|
||||
|
||||
// TypeDecl = "type" ExportedName Type .
|
||||
// parseTypeDecl parses a TypeDecl:
|
||||
//
|
||||
// TypeDecl = "type" ExportedName Type .
|
||||
func (p *parser) parseTypeDecl() {
|
||||
p.expectKeyword("type")
|
||||
pkg, name := p.parseExportedName()
|
||||
|
@ -939,8 +974,9 @@ func (p *parser) parseTypeDecl() {
|
|||
}
|
||||
}
|
||||
|
||||
// VarDecl = "var" ExportedName Type .
|
||||
// parseVarDecl parses a VarDecl:
|
||||
//
|
||||
// VarDecl = "var" ExportedName Type .
|
||||
func (p *parser) parseVarDecl() {
|
||||
p.expectKeyword("var")
|
||||
pkg, name := p.parseExportedName()
|
||||
|
@ -948,9 +984,10 @@ func (p *parser) parseVarDecl() {
|
|||
pkg.Scope().Insert(types.NewVar(token.NoPos, pkg, name, typ))
|
||||
}
|
||||
|
||||
// Func = Signature [ Body ] .
|
||||
// Body = "{" ... "}" .
|
||||
// parseFunc parses a Func:
|
||||
//
|
||||
// Func = Signature [ Body ] .
|
||||
// Body = "{" ... "}" .
|
||||
func (p *parser) parseFunc(recv *types.Var) *types.Signature {
|
||||
sig := p.parseSignature(recv)
|
||||
if p.tok == '{' {
|
||||
|
@ -967,9 +1004,10 @@ func (p *parser) parseFunc(recv *types.Var) *types.Signature {
|
|||
return sig
|
||||
}
|
||||
|
||||
// MethodDecl = "func" Receiver Name Func .
|
||||
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
|
||||
// parseMethodDecl parses a MethodDecl:
|
||||
//
|
||||
// MethodDecl = "func" Receiver Name Func .
|
||||
// Receiver = "(" ( identifier | "?" ) [ "*" ] ExportedName ")" .
|
||||
func (p *parser) parseMethodDecl() {
|
||||
// "func" already consumed
|
||||
p.expect('(')
|
||||
|
@ -992,8 +1030,9 @@ func (p *parser) parseMethodDecl() {
|
|||
base.AddMethod(types.NewFunc(token.NoPos, pkg, name, sig))
|
||||
}
|
||||
|
||||
// FuncDecl = "func" ExportedName Func .
|
||||
// parseFuncDecl parses a FuncDecl:
|
||||
//
|
||||
// FuncDecl = "func" ExportedName Func .
|
||||
func (p *parser) parseFuncDecl() {
|
||||
// "func" already consumed
|
||||
pkg, name := p.parseExportedName()
|
||||
|
@ -1001,8 +1040,9 @@ func (p *parser) parseFuncDecl() {
|
|||
pkg.Scope().Insert(types.NewFunc(token.NoPos, pkg, name, typ))
|
||||
}
|
||||
|
||||
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
|
||||
// parseDecl parses a Decl:
|
||||
//
|
||||
// Decl = [ ImportDecl | ConstDecl | TypeDecl | VarDecl | FuncDecl | MethodDecl ] "\n" .
|
||||
func (p *parser) parseDecl() {
|
||||
if p.tok == scanner.Ident {
|
||||
switch p.lit {
|
||||
|
@ -1029,9 +1069,10 @@ func (p *parser) parseDecl() {
|
|||
// ----------------------------------------------------------------------------
|
||||
// Export
|
||||
|
||||
// Export = "PackageClause { Decl } "$$" .
|
||||
// PackageClause = "package" PackageName [ "safe" ] "\n" .
|
||||
// parseExport parses an Export:
|
||||
//
|
||||
// Export = "PackageClause { Decl } "$$" .
|
||||
// PackageClause = "package" PackageName [ "safe" ] "\n" .
|
||||
func (p *parser) parseExport() *types.Package {
|
||||
p.expectKeyword("package")
|
||||
name := p.parsePackageName()
|
||||
|
|
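Most of the churn in this file is doc comments being rewritten for the new gofmt doc-comment conventions introduced with Go 1.19: each parse method gains a sentence naming it, a bare // separator, and the EBNF production set off as its own block. A small illustrative function (hypothetical, not from x/tools) showing the same layout:

```go
package grammar

// parseIntList parses an IntList:
//
//	IntList = int_lit { "," int_lit } .
//
// In the Go 1.19 style the production is indented inside the comment,
// so go/doc renders it as a code block instead of reflowing it as prose.
func parseIntList(s string) ([]int, error) {
	_ = s // parsing elided; this example only illustrates the comment layout
	return nil, nil
}
```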
5 vendor/golang.org/x/tools/go/internal/gcimporter/iexport.go generated vendored
|
@@ -251,7 +251,10 @@ func (p *iexporter) stringOff(s string) uint64 {
// pushDecl adds n to the declaration work queue, if not already present.
func (p *iexporter) pushDecl(obj types.Object) {
	// Package unsafe is known to the compiler and predeclared.
	assert(obj.Pkg() != types.Unsafe)
	// Caller should not ask us to do export it.
	if obj.Pkg() == types.Unsafe {
		panic("cannot export package unsafe")
	}

	if _, ok := p.declIndex[obj]; ok {
		return
93 vendor/golang.org/x/tools/go/internal/gcimporter/iimport.go generated vendored
|
@ -17,6 +17,7 @@ import (
|
|||
"go/token"
|
||||
"go/types"
|
||||
"io"
|
||||
"math/big"
|
||||
"sort"
|
||||
"strings"
|
||||
|
||||
|
@ -53,7 +54,7 @@ const (
|
|||
)
|
||||
|
||||
type ident struct {
|
||||
pkg string
|
||||
pkg *types.Package
|
||||
name string
|
||||
}
|
||||
|
||||
|
@ -100,7 +101,9 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
|
|||
if !debug {
|
||||
defer func() {
|
||||
if e := recover(); e != nil {
|
||||
if version > currentVersion {
|
||||
if bundle {
|
||||
err = fmt.Errorf("%v", e)
|
||||
} else if version > currentVersion {
|
||||
err = fmt.Errorf("cannot import %q (%v), export data is newer version - update tool", path, e)
|
||||
} else {
|
||||
err = fmt.Errorf("cannot import %q (%v), possibly version skew - reinstall package", path, e)
|
||||
|
@ -237,6 +240,15 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
|
|||
pkg.MarkComplete()
|
||||
}
|
||||
|
||||
// SetConstraint can't be called if the constraint type is not yet complete.
|
||||
// When type params are created in the 'P' case of (*importReader).obj(),
|
||||
// the associated constraint type may not be complete due to recursion.
|
||||
// Therefore, we defer calling SetConstraint there, and call it here instead
|
||||
// after all types are complete.
|
||||
for _, d := range p.later {
|
||||
typeparams.SetTypeParamConstraint(d.t, d.constraint)
|
||||
}
|
||||
|
||||
for _, typ := range p.interfaceList {
|
||||
typ.Complete()
|
||||
}
|
||||
|
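The hunk above queues SetConstraint calls in p.later instead of applying them immediately, because a constraint read inside a recursive type may still be incomplete. A self-contained sketch of the same record-now, apply-later pattern using only go/types (the package and names are made up):

```go
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

// deferredConstraint records a SetConstraint call to be replayed once all
// types are complete, mirroring setConstraintArgs in iimport.go.
type deferredConstraint struct {
	tparam     *types.TypeParam
	constraint types.Type
}

func main() {
	pkg := types.NewPackage("example.com/p", "p") // placeholder package
	tname := types.NewTypeName(token.NoPos, pkg, "T", nil)
	tp := types.NewTypeParam(tname, nil)

	// While "importing", only queue the constraint...
	var later []deferredConstraint
	later = append(later, deferredConstraint{tparam: tp, constraint: types.Universe.Lookup("any").Type()})

	// ...and apply it after everything else has been constructed.
	for _, d := range later {
		d.tparam.SetConstraint(d.constraint)
	}
	fmt.Println(tp.Constraint()) // the constraint is now set
}
```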
@ -244,6 +256,11 @@ func iimportCommon(fset *token.FileSet, imports map[string]*types.Package, data
|
|||
return pkgs, nil
|
||||
}
|
||||
|
||||
type setConstraintArgs struct {
|
||||
t *typeparams.TypeParam
|
||||
constraint types.Type
|
||||
}
|
||||
|
||||
type iimporter struct {
|
||||
version int
|
||||
ipath string
|
||||
|
@ -260,6 +277,9 @@ type iimporter struct {
|
|||
fake fakeFileSet
|
||||
interfaceList []*types.Interface
|
||||
|
||||
// Arguments for calls to SetConstraint that are deferred due to recursive types
|
||||
later []setConstraintArgs
|
||||
|
||||
indent int // for tracing support
|
||||
}
|
||||
|
||||
|
@ -444,7 +464,7 @@ func (r *importReader) obj(name string) {
|
|||
|
||||
// To handle recursive references to the typeparam within its
|
||||
// bound, save the partial type in tparamIndex before reading the bounds.
|
||||
id := ident{r.currPkg.Name(), name}
|
||||
id := ident{r.currPkg, name}
|
||||
r.p.tparamIndex[id] = t
|
||||
var implicit bool
|
||||
if r.p.version >= iexportVersionGo1_18 {
|
||||
|
@ -458,7 +478,11 @@ func (r *importReader) obj(name string) {
|
|||
}
|
||||
typeparams.MarkImplicit(iface)
|
||||
}
|
||||
typeparams.SetTypeParamConstraint(t, constraint)
|
||||
// The constraint type may not be complete, if we
|
||||
// are in the middle of a type recursion involving type
|
||||
// constraints. So, we defer SetConstraint until we have
|
||||
// completely set up all types in ImportData.
|
||||
r.p.later = append(r.p.later, setConstraintArgs{t: t, constraint: constraint})
|
||||
|
||||
case 'V':
|
||||
typ := r.typ()
|
||||
|
@ -489,7 +513,9 @@ func (r *importReader) value() (typ types.Type, val constant.Value) {
|
|||
val = constant.MakeString(r.string())
|
||||
|
||||
case types.IsInteger:
|
||||
val = r.mpint(b)
|
||||
var x big.Int
|
||||
r.mpint(&x, b)
|
||||
val = constant.Make(&x)
|
||||
|
||||
case types.IsFloat:
|
||||
val = r.mpfloat(b)
|
||||
|
@ -538,8 +564,8 @@ func intSize(b *types.Basic) (signed bool, maxBytes uint) {
|
|||
return
|
||||
}
|
||||
|
||||
func (r *importReader) mpint(b *types.Basic) constant.Value {
|
||||
signed, maxBytes := intSize(b)
|
||||
func (r *importReader) mpint(x *big.Int, typ *types.Basic) {
|
||||
signed, maxBytes := intSize(typ)
|
||||
|
||||
maxSmall := 256 - maxBytes
|
||||
if signed {
|
||||
|
@ -558,7 +584,8 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
|
|||
v = ^v
|
||||
}
|
||||
}
|
||||
return constant.MakeInt64(v)
|
||||
x.SetInt64(v)
|
||||
return
|
||||
}
|
||||
|
||||
v := -n
|
||||
|
@ -568,47 +595,23 @@ func (r *importReader) mpint(b *types.Basic) constant.Value {
|
|||
if v < 1 || uint(v) > maxBytes {
|
||||
errorf("weird decoding: %v, %v => %v", n, signed, v)
|
||||
}
|
||||
|
||||
buf := make([]byte, v)
|
||||
io.ReadFull(&r.declReader, buf)
|
||||
|
||||
// convert to little endian
|
||||
// TODO(gri) go/constant should have a more direct conversion function
|
||||
// (e.g., once it supports a big.Float based implementation)
|
||||
for i, j := 0, len(buf)-1; i < j; i, j = i+1, j-1 {
|
||||
buf[i], buf[j] = buf[j], buf[i]
|
||||
}
|
||||
|
||||
x := constant.MakeFromBytes(buf)
|
||||
b := make([]byte, v)
|
||||
io.ReadFull(&r.declReader, b)
|
||||
x.SetBytes(b)
|
||||
if signed && n&1 != 0 {
|
||||
x = constant.UnaryOp(token.SUB, x, 0)
|
||||
x.Neg(x)
|
||||
}
|
||||
return x
|
||||
}
|
||||
|
||||
func (r *importReader) mpfloat(b *types.Basic) constant.Value {
|
||||
x := r.mpint(b)
|
||||
if constant.Sign(x) == 0 {
|
||||
return x
|
||||
func (r *importReader) mpfloat(typ *types.Basic) constant.Value {
|
||||
var mant big.Int
|
||||
r.mpint(&mant, typ)
|
||||
var f big.Float
|
||||
f.SetInt(&mant)
|
||||
if f.Sign() != 0 {
|
||||
f.SetMantExp(&f, int(r.int64()))
|
||||
}
|
||||
|
||||
exp := r.int64()
|
||||
switch {
|
||||
case exp > 0:
|
||||
x = constant.Shift(x, token.SHL, uint(exp))
|
||||
// Ensure that the imported Kind is Float, else this constant may run into
|
||||
// bitsize limits on overlarge integers. Eventually we can instead adopt
|
||||
// the approach of CL 288632, but that CL relies on go/constant APIs that
|
||||
// were introduced in go1.13.
|
||||
//
|
||||
// TODO(rFindley): sync the logic here with tip Go once we no longer
|
||||
// support go1.12.
|
||||
x = constant.ToFloat(x)
|
||||
case exp < 0:
|
||||
d := constant.Shift(constant.MakeInt64(1), token.SHL, uint(-exp))
|
||||
x = constant.BinaryOp(x, token.QUO, d)
|
||||
}
|
||||
return x
|
||||
return constant.Make(&f)
|
||||
}
|
||||
|
||||
func (r *importReader) ident() string {
|
||||
|
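mpfloat now reads the mantissa into a big.Int, lifts it into a big.Float, applies the base-2 exponent with SetMantExp, and wraps the result with constant.Make. A tiny standalone sketch of that conversion (the values are invented):

```go
package main

import (
	"fmt"
	"go/constant"
	"math/big"
)

// floatConst builds a go/constant value from an integer mantissa and a
// base-2 exponent, the same way the rewritten mpfloat does.
func floatConst(mant *big.Int, exp int) constant.Value {
	var f big.Float
	f.SetInt(mant)
	if f.Sign() != 0 {
		f.SetMantExp(&f, exp) // f = f * 2**exp
	}
	return constant.Make(&f)
}

func main() {
	v := floatConst(big.NewInt(3), -1) // 3 * 2^-1
	fmt.Println(v)                     // 1.5
}
```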
@ -756,7 +759,7 @@ func (r *importReader) doType(base *types.Named) (res types.Type) {
|
|||
errorf("unexpected type param type")
|
||||
}
|
||||
pkg, name := r.qualifiedIdent()
|
||||
id := ident{pkg.Name(), name}
|
||||
id := ident{pkg, name}
|
||||
if t, ok := r.p.tparamIndex[id]; ok {
|
||||
// We're already in the process of importing this typeparam.
|
||||
return t
|
||||
|
|
10 vendor/golang.org/x/tools/go/internal/gcimporter/unified_no.go generated vendored Normal file
|
@@ -0,0 +1,10 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build !(go1.18 && goexperiment.unified)
// +build !go1.18 !goexperiment.unified

package gcimporter

const unifiedIR = false
10 vendor/golang.org/x/tools/go/internal/gcimporter/unified_yes.go generated vendored Normal file
|
@@ -0,0 +1,10 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build go1.18 && goexperiment.unified
// +build go1.18,goexperiment.unified

package gcimporter

const unifiedIR = true
19 vendor/golang.org/x/tools/go/internal/gcimporter/ureader_no.go generated vendored Normal file
|
@@ -0,0 +1,19 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build !go1.18
// +build !go1.18

package gcimporter

import (
	"fmt"
	"go/token"
	"go/types"
)

func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
	err = fmt.Errorf("go/tools compiled with a Go version earlier than 1.18 cannot read unified IR export data")
	return
}
612 vendor/golang.org/x/tools/go/internal/gcimporter/ureader_yes.go generated vendored Normal file
|
@ -0,0 +1,612 @@
|
|||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
// Derived from go/internal/gcimporter/ureader.go
|
||||
|
||||
//go:build go1.18
|
||||
// +build go1.18
|
||||
|
||||
package gcimporter
|
||||
|
||||
import (
|
||||
"go/token"
|
||||
"go/types"
|
||||
"strings"
|
||||
|
||||
"golang.org/x/tools/go/internal/pkgbits"
|
||||
)
|
||||
|
||||
// A pkgReader holds the shared state for reading a unified IR package
|
||||
// description.
|
||||
type pkgReader struct {
|
||||
pkgbits.PkgDecoder
|
||||
|
||||
fake fakeFileSet
|
||||
|
||||
ctxt *types.Context
|
||||
imports map[string]*types.Package // previously imported packages, indexed by path
|
||||
|
||||
// lazily initialized arrays corresponding to the unified IR
|
||||
// PosBase, Pkg, and Type sections, respectively.
|
||||
posBases []string // position bases (i.e., file names)
|
||||
pkgs []*types.Package
|
||||
typs []types.Type
|
||||
|
||||
// laterFns holds functions that need to be invoked at the end of
|
||||
// import reading.
|
||||
laterFns []func()
|
||||
}
|
||||
|
||||
// later adds a function to be invoked at the end of import reading.
|
||||
func (pr *pkgReader) later(fn func()) {
|
||||
pr.laterFns = append(pr.laterFns, fn)
|
||||
}
|
||||
|
||||
// See cmd/compile/internal/noder.derivedInfo.
|
||||
type derivedInfo struct {
|
||||
idx pkgbits.Index
|
||||
needed bool
|
||||
}
|
||||
|
||||
// See cmd/compile/internal/noder.typeInfo.
|
||||
type typeInfo struct {
|
||||
idx pkgbits.Index
|
||||
derived bool
|
||||
}
|
||||
|
||||
func UImportData(fset *token.FileSet, imports map[string]*types.Package, data []byte, path string) (_ int, pkg *types.Package, err error) {
|
||||
s := string(data)
|
||||
s = s[:strings.LastIndex(s, "\n$$\n")]
|
||||
input := pkgbits.NewPkgDecoder(path, s)
|
||||
pkg = readUnifiedPackage(fset, nil, imports, input)
|
||||
return
|
||||
}
|
||||
|
||||
// readUnifiedPackage reads a package description from the given
|
||||
// unified IR export data decoder.
|
||||
func readUnifiedPackage(fset *token.FileSet, ctxt *types.Context, imports map[string]*types.Package, input pkgbits.PkgDecoder) *types.Package {
|
||||
pr := pkgReader{
|
||||
PkgDecoder: input,
|
||||
|
||||
fake: fakeFileSet{
|
||||
fset: fset,
|
||||
files: make(map[string]*fileInfo),
|
||||
},
|
||||
|
||||
ctxt: ctxt,
|
||||
imports: imports,
|
||||
|
||||
posBases: make([]string, input.NumElems(pkgbits.RelocPosBase)),
|
||||
pkgs: make([]*types.Package, input.NumElems(pkgbits.RelocPkg)),
|
||||
typs: make([]types.Type, input.NumElems(pkgbits.RelocType)),
|
||||
}
|
||||
defer pr.fake.setLines()
|
||||
|
||||
r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
|
||||
pkg := r.pkg()
|
||||
r.Bool() // has init
|
||||
|
||||
for i, n := 0, r.Len(); i < n; i++ {
|
||||
// As if r.obj(), but avoiding the Scope.Lookup call,
|
||||
// to avoid eager loading of imports.
|
||||
r.Sync(pkgbits.SyncObject)
|
||||
assert(!r.Bool())
|
||||
r.p.objIdx(r.Reloc(pkgbits.RelocObj))
|
||||
assert(r.Len() == 0)
|
||||
}
|
||||
|
||||
r.Sync(pkgbits.SyncEOF)
|
||||
|
||||
for _, fn := range pr.laterFns {
|
||||
fn()
|
||||
}
|
||||
|
||||
pkg.MarkComplete()
|
||||
return pkg
|
||||
}
|
||||
|
||||
// A reader holds the state for reading a single unified IR element
|
||||
// within a package.
|
||||
type reader struct {
|
||||
pkgbits.Decoder
|
||||
|
||||
p *pkgReader
|
||||
|
||||
dict *readerDict
|
||||
}
|
||||
|
||||
// A readerDict holds the state for type parameters that parameterize
|
||||
// the current unified IR element.
|
||||
type readerDict struct {
|
||||
// bounds is a slice of typeInfos corresponding to the underlying
|
||||
// bounds of the element's type parameters.
|
||||
bounds []typeInfo
|
||||
|
||||
// tparams is a slice of the constructed TypeParams for the element.
|
||||
tparams []*types.TypeParam
|
||||
|
||||
// devived is a slice of types derived from tparams, which may be
|
||||
// instantiated while reading the current element.
|
||||
derived []derivedInfo
|
||||
derivedTypes []types.Type // lazily instantiated from derived
|
||||
}
|
||||
|
||||
func (pr *pkgReader) newReader(k pkgbits.RelocKind, idx pkgbits.Index, marker pkgbits.SyncMarker) *reader {
|
||||
return &reader{
|
||||
Decoder: pr.NewDecoder(k, idx, marker),
|
||||
p: pr,
|
||||
}
|
||||
}
|
||||
|
||||
// @@@ Positions
|
||||
|
||||
func (r *reader) pos() token.Pos {
|
||||
r.Sync(pkgbits.SyncPos)
|
||||
if !r.Bool() {
|
||||
return token.NoPos
|
||||
}
|
||||
|
||||
// TODO(mdempsky): Delta encoding.
|
||||
posBase := r.posBase()
|
||||
line := r.Uint()
|
||||
col := r.Uint()
|
||||
return r.p.fake.pos(posBase, int(line), int(col))
|
||||
}
|
||||
|
||||
func (r *reader) posBase() string {
|
||||
return r.p.posBaseIdx(r.Reloc(pkgbits.RelocPosBase))
|
||||
}
|
||||
|
||||
func (pr *pkgReader) posBaseIdx(idx pkgbits.Index) string {
|
||||
if b := pr.posBases[idx]; b != "" {
|
||||
return b
|
||||
}
|
||||
|
||||
r := pr.newReader(pkgbits.RelocPosBase, idx, pkgbits.SyncPosBase)
|
||||
|
||||
// Within types2, position bases have a lot more details (e.g.,
|
||||
// keeping track of where //line directives appeared exactly).
|
||||
//
|
||||
// For go/types, we just track the file name.
|
||||
|
||||
filename := r.String()
|
||||
|
||||
if r.Bool() { // file base
|
||||
// Was: "b = token.NewTrimmedFileBase(filename, true)"
|
||||
} else { // line base
|
||||
pos := r.pos()
|
||||
line := r.Uint()
|
||||
col := r.Uint()
|
||||
|
||||
// Was: "b = token.NewLineBase(pos, filename, true, line, col)"
|
||||
_, _, _ = pos, line, col
|
||||
}
|
||||
|
||||
b := filename
|
||||
pr.posBases[idx] = b
|
||||
return b
|
||||
}
|
||||
|
||||
// @@@ Packages
|
||||
|
||||
func (r *reader) pkg() *types.Package {
|
||||
r.Sync(pkgbits.SyncPkg)
|
||||
return r.p.pkgIdx(r.Reloc(pkgbits.RelocPkg))
|
||||
}
|
||||
|
||||
func (pr *pkgReader) pkgIdx(idx pkgbits.Index) *types.Package {
|
||||
// TODO(mdempsky): Consider using some non-nil pointer to indicate
|
||||
// the universe scope, so we don't need to keep re-reading it.
|
||||
if pkg := pr.pkgs[idx]; pkg != nil {
|
||||
return pkg
|
||||
}
|
||||
|
||||
pkg := pr.newReader(pkgbits.RelocPkg, idx, pkgbits.SyncPkgDef).doPkg()
|
||||
pr.pkgs[idx] = pkg
|
||||
return pkg
|
||||
}
|
||||
|
||||
func (r *reader) doPkg() *types.Package {
|
||||
path := r.String()
|
||||
switch path {
|
||||
case "":
|
||||
path = r.p.PkgPath()
|
||||
case "builtin":
|
||||
return nil // universe
|
||||
case "unsafe":
|
||||
return types.Unsafe
|
||||
}
|
||||
|
||||
if pkg := r.p.imports[path]; pkg != nil {
|
||||
return pkg
|
||||
}
|
||||
|
||||
name := r.String()
|
||||
|
||||
pkg := types.NewPackage(path, name)
|
||||
r.p.imports[path] = pkg
|
||||
|
||||
imports := make([]*types.Package, r.Len())
|
||||
for i := range imports {
|
||||
imports[i] = r.pkg()
|
||||
}
|
||||
pkg.SetImports(imports)
|
||||
|
||||
return pkg
|
||||
}
|
||||
|
||||
// @@@ Types
|
||||
|
||||
func (r *reader) typ() types.Type {
|
||||
return r.p.typIdx(r.typInfo(), r.dict)
|
||||
}
|
||||
|
||||
func (r *reader) typInfo() typeInfo {
|
||||
r.Sync(pkgbits.SyncType)
|
||||
if r.Bool() {
|
||||
return typeInfo{idx: pkgbits.Index(r.Len()), derived: true}
|
||||
}
|
||||
return typeInfo{idx: r.Reloc(pkgbits.RelocType), derived: false}
|
||||
}
|
||||
|
||||
func (pr *pkgReader) typIdx(info typeInfo, dict *readerDict) types.Type {
|
||||
idx := info.idx
|
||||
var where *types.Type
|
||||
if info.derived {
|
||||
where = &dict.derivedTypes[idx]
|
||||
idx = dict.derived[idx].idx
|
||||
} else {
|
||||
where = &pr.typs[idx]
|
||||
}
|
||||
|
||||
if typ := *where; typ != nil {
|
||||
return typ
|
||||
}
|
||||
|
||||
r := pr.newReader(pkgbits.RelocType, idx, pkgbits.SyncTypeIdx)
|
||||
r.dict = dict
|
||||
|
||||
typ := r.doTyp()
|
||||
assert(typ != nil)
|
||||
|
||||
// See comment in pkgReader.typIdx explaining how this happens.
|
||||
if prev := *where; prev != nil {
|
||||
return prev
|
||||
}
|
||||
|
||||
*where = typ
|
||||
return typ
|
||||
}
|
||||
|
||||
func (r *reader) doTyp() (res types.Type) {
|
||||
switch tag := pkgbits.CodeType(r.Code(pkgbits.SyncType)); tag {
|
||||
default:
|
||||
errorf("unhandled type tag: %v", tag)
|
||||
panic("unreachable")
|
||||
|
||||
case pkgbits.TypeBasic:
|
||||
return types.Typ[r.Len()]
|
||||
|
||||
case pkgbits.TypeNamed:
|
||||
obj, targs := r.obj()
|
||||
name := obj.(*types.TypeName)
|
||||
if len(targs) != 0 {
|
||||
t, _ := types.Instantiate(r.p.ctxt, name.Type(), targs, false)
|
||||
return t
|
||||
}
|
||||
return name.Type()
|
||||
|
||||
case pkgbits.TypeTypeParam:
|
||||
return r.dict.tparams[r.Len()]
|
||||
|
||||
case pkgbits.TypeArray:
|
||||
len := int64(r.Uint64())
|
||||
return types.NewArray(r.typ(), len)
|
||||
case pkgbits.TypeChan:
|
||||
dir := types.ChanDir(r.Len())
|
||||
return types.NewChan(dir, r.typ())
|
||||
case pkgbits.TypeMap:
|
||||
return types.NewMap(r.typ(), r.typ())
|
||||
case pkgbits.TypePointer:
|
||||
return types.NewPointer(r.typ())
|
||||
case pkgbits.TypeSignature:
|
||||
return r.signature(nil, nil, nil)
|
||||
case pkgbits.TypeSlice:
|
||||
return types.NewSlice(r.typ())
|
||||
case pkgbits.TypeStruct:
|
||||
return r.structType()
|
||||
case pkgbits.TypeInterface:
|
||||
return r.interfaceType()
|
||||
case pkgbits.TypeUnion:
|
||||
return r.unionType()
|
||||
}
|
||||
}
|
||||
|
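In the TypeNamed case above, generic named types are rebuilt by calling types.Instantiate with the decoded type arguments and the reader's shared types.Context. A hedged sketch of that call on a hand-built generic type (nothing here comes from real export data):

```go
package main

import (
	"fmt"
	"go/token"
	"go/types"
)

func main() {
	pkg := types.NewPackage("example.com/list", "list") // placeholder package

	// type List[T any] struct{} — constructed by hand for the example.
	tname := types.NewTypeName(token.NoPos, pkg, "List", nil)
	named := types.NewNamed(tname, types.NewStruct(nil, nil), nil)
	tparam := types.NewTypeParam(types.NewTypeName(token.NoPos, pkg, "T", nil), nil)
	tparam.SetConstraint(types.Universe.Lookup("any").Type())
	named.SetTypeParams([]*types.TypeParam{tparam})

	// Instantiate List[int], sharing a Context the way the unified reader does.
	ctxt := types.NewContext()
	inst, err := types.Instantiate(ctxt, named, []types.Type{types.Typ[types.Int]}, false)
	if err != nil {
		panic(err)
	}
	fmt.Println(inst) // e.g. example.com/list.List[int]
}
```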
||||
func (r *reader) structType() *types.Struct {
|
||||
fields := make([]*types.Var, r.Len())
|
||||
var tags []string
|
||||
for i := range fields {
|
||||
pos := r.pos()
|
||||
pkg, name := r.selector()
|
||||
ftyp := r.typ()
|
||||
tag := r.String()
|
||||
embedded := r.Bool()
|
||||
|
||||
fields[i] = types.NewField(pos, pkg, name, ftyp, embedded)
|
||||
if tag != "" {
|
||||
for len(tags) < i {
|
||||
tags = append(tags, "")
|
||||
}
|
||||
tags = append(tags, tag)
|
||||
}
|
||||
}
|
||||
return types.NewStruct(fields, tags)
|
||||
}
|
||||
|
||||
func (r *reader) unionType() *types.Union {
|
||||
terms := make([]*types.Term, r.Len())
|
||||
for i := range terms {
|
||||
terms[i] = types.NewTerm(r.Bool(), r.typ())
|
||||
}
|
||||
return types.NewUnion(terms)
|
||||
}
|
||||
|
||||
func (r *reader) interfaceType() *types.Interface {
|
||||
methods := make([]*types.Func, r.Len())
|
||||
embeddeds := make([]types.Type, r.Len())
|
||||
implicit := len(methods) == 0 && len(embeddeds) == 1 && r.Bool()
|
||||
|
||||
for i := range methods {
|
||||
pos := r.pos()
|
||||
pkg, name := r.selector()
|
||||
mtyp := r.signature(nil, nil, nil)
|
||||
methods[i] = types.NewFunc(pos, pkg, name, mtyp)
|
||||
}
|
||||
|
||||
for i := range embeddeds {
|
||||
embeddeds[i] = r.typ()
|
||||
}
|
||||
|
||||
iface := types.NewInterfaceType(methods, embeddeds)
|
||||
if implicit {
|
||||
iface.MarkImplicit()
|
||||
}
|
||||
return iface
|
||||
}
|
||||
|
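unionType and interfaceType reconstruct Go 1.18 type sets with types.NewTerm, types.NewUnion and types.NewInterfaceType, and mark a methodless single-embedding interface implicit. A sketch building the constraint ~int | ~string the same way, by hand:

```go
package main

import (
	"fmt"
	"go/types"
)

func main() {
	// The embedded union ~int | ~string.
	union := types.NewUnion([]*types.Term{
		types.NewTerm(true, types.Typ[types.Int]),    // ~int
		types.NewTerm(true, types.Typ[types.String]), // ~string
	})

	// An interface with no methods and a single embedded union is how the
	// constraint literal `~int | ~string` is represented; MarkImplicit
	// records that it was written without the interface{...} wrapper.
	iface := types.NewInterfaceType(nil, []types.Type{union})
	iface.MarkImplicit()
	iface.Complete()

	fmt.Println(iface) // the ~int | ~string constraint
}
```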
||||
func (r *reader) signature(recv *types.Var, rtparams, tparams []*types.TypeParam) *types.Signature {
|
||||
r.Sync(pkgbits.SyncSignature)
|
||||
|
||||
params := r.params()
|
||||
results := r.params()
|
||||
variadic := r.Bool()
|
||||
|
||||
return types.NewSignatureType(recv, rtparams, tparams, params, results, variadic)
|
||||
}
|
||||
|
||||
func (r *reader) params() *types.Tuple {
|
||||
r.Sync(pkgbits.SyncParams)
|
||||
|
||||
params := make([]*types.Var, r.Len())
|
||||
for i := range params {
|
||||
params[i] = r.param()
|
||||
}
|
||||
|
||||
return types.NewTuple(params...)
|
||||
}
|
||||
|
||||
func (r *reader) param() *types.Var {
|
||||
r.Sync(pkgbits.SyncParam)
|
||||
|
||||
pos := r.pos()
|
||||
pkg, name := r.localIdent()
|
||||
typ := r.typ()
|
||||
|
||||
return types.NewParam(pos, pkg, name, typ)
|
||||
}
|
||||
|
||||
// @@@ Objects
|
||||
|
||||
func (r *reader) obj() (types.Object, []types.Type) {
|
||||
r.Sync(pkgbits.SyncObject)
|
||||
|
||||
assert(!r.Bool())
|
||||
|
||||
pkg, name := r.p.objIdx(r.Reloc(pkgbits.RelocObj))
|
||||
obj := pkgScope(pkg).Lookup(name)
|
||||
|
||||
targs := make([]types.Type, r.Len())
|
||||
for i := range targs {
|
||||
targs[i] = r.typ()
|
||||
}
|
||||
|
||||
return obj, targs
|
||||
}
|
||||
|
||||
func (pr *pkgReader) objIdx(idx pkgbits.Index) (*types.Package, string) {
|
||||
rname := pr.newReader(pkgbits.RelocName, idx, pkgbits.SyncObject1)
|
||||
|
||||
objPkg, objName := rname.qualifiedIdent()
|
||||
assert(objName != "")
|
||||
|
||||
tag := pkgbits.CodeObj(rname.Code(pkgbits.SyncCodeObj))
|
||||
|
||||
if tag == pkgbits.ObjStub {
|
||||
assert(objPkg == nil || objPkg == types.Unsafe)
|
||||
return objPkg, objName
|
||||
}
|
||||
|
||||
if objPkg.Scope().Lookup(objName) == nil {
|
||||
dict := pr.objDictIdx(idx)
|
||||
|
||||
r := pr.newReader(pkgbits.RelocObj, idx, pkgbits.SyncObject1)
|
||||
r.dict = dict
|
||||
|
||||
declare := func(obj types.Object) {
|
||||
objPkg.Scope().Insert(obj)
|
||||
}
|
||||
|
||||
switch tag {
|
||||
default:
|
||||
panic("weird")
|
||||
|
||||
case pkgbits.ObjAlias:
|
||||
pos := r.pos()
|
||||
typ := r.typ()
|
||||
declare(types.NewTypeName(pos, objPkg, objName, typ))
|
||||
|
||||
case pkgbits.ObjConst:
|
||||
pos := r.pos()
|
||||
typ := r.typ()
|
||||
val := r.Value()
|
||||
declare(types.NewConst(pos, objPkg, objName, typ, val))
|
||||
|
||||
case pkgbits.ObjFunc:
|
||||
pos := r.pos()
|
||||
tparams := r.typeParamNames()
|
||||
sig := r.signature(nil, nil, tparams)
|
||||
declare(types.NewFunc(pos, objPkg, objName, sig))
|
||||
|
||||
case pkgbits.ObjType:
|
||||
pos := r.pos()
|
||||
|
||||
obj := types.NewTypeName(pos, objPkg, objName, nil)
|
||||
named := types.NewNamed(obj, nil, nil)
|
||||
declare(obj)
|
||||
|
||||
named.SetTypeParams(r.typeParamNames())
|
||||
|
||||
// TODO(mdempsky): Rewrite receiver types to underlying is an
|
||||
// Interface? The go/types importer does this (I think because
|
||||
// unit tests expected that), but cmd/compile doesn't care
|
||||
// about it, so maybe we can avoid worrying about that here.
|
||||
rhs := r.typ()
|
||||
r.p.later(func() {
|
||||
underlying := rhs.Underlying()
|
||||
named.SetUnderlying(underlying)
|
||||
})
|
||||
|
||||
for i, n := 0, r.Len(); i < n; i++ {
|
||||
named.AddMethod(r.method())
|
||||
}
|
||||
|
||||
case pkgbits.ObjVar:
|
||||
pos := r.pos()
|
||||
typ := r.typ()
|
||||
declare(types.NewVar(pos, objPkg, objName, typ))
|
||||
}
|
||||
}
|
||||
|
||||
return objPkg, objName
|
||||
}
|
||||
|
||||
func (pr *pkgReader) objDictIdx(idx pkgbits.Index) *readerDict {
|
||||
r := pr.newReader(pkgbits.RelocObjDict, idx, pkgbits.SyncObject1)
|
||||
|
||||
var dict readerDict
|
||||
|
||||
if implicits := r.Len(); implicits != 0 {
|
||||
errorf("unexpected object with %v implicit type parameter(s)", implicits)
|
||||
}
|
||||
|
||||
dict.bounds = make([]typeInfo, r.Len())
|
||||
for i := range dict.bounds {
|
||||
dict.bounds[i] = r.typInfo()
|
||||
}
|
||||
|
||||
dict.derived = make([]derivedInfo, r.Len())
|
||||
dict.derivedTypes = make([]types.Type, len(dict.derived))
|
||||
for i := range dict.derived {
|
||||
dict.derived[i] = derivedInfo{r.Reloc(pkgbits.RelocType), r.Bool()}
|
||||
}
|
||||
|
||||
// function references follow, but reader doesn't need those
|
||||
|
||||
return &dict
|
||||
}
|
||||
|
||||
func (r *reader) typeParamNames() []*types.TypeParam {
|
||||
r.Sync(pkgbits.SyncTypeParamNames)
|
||||
|
||||
// Note: This code assumes it only processes objects without
|
||||
// implement type parameters. This is currently fine, because
|
||||
// reader is only used to read in exported declarations, which are
|
||||
// always package scoped.
|
||||
|
||||
if len(r.dict.bounds) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
// Careful: Type parameter lists may have cycles. To allow for this,
|
||||
// we construct the type parameter list in two passes: first we
|
||||
// create all the TypeNames and TypeParams, then we construct and
|
||||
// set the bound type.
|
||||
|
||||
r.dict.tparams = make([]*types.TypeParam, len(r.dict.bounds))
|
||||
for i := range r.dict.bounds {
|
||||
pos := r.pos()
|
||||
pkg, name := r.localIdent()
|
||||
|
||||
tname := types.NewTypeName(pos, pkg, name, nil)
|
||||
r.dict.tparams[i] = types.NewTypeParam(tname, nil)
|
||||
}
|
||||
|
||||
typs := make([]types.Type, len(r.dict.bounds))
|
||||
for i, bound := range r.dict.bounds {
|
||||
typs[i] = r.p.typIdx(bound, r.dict)
|
||||
}
|
||||
|
||||
// TODO(mdempsky): This is subtle, elaborate further.
|
||||
//
|
||||
// We have to save tparams outside of the closure, because
|
||||
// typeParamNames() can be called multiple times with the same
|
||||
// dictionary instance.
|
||||
//
|
||||
// Also, this needs to happen later to make sure SetUnderlying has
|
||||
// been called.
|
||||
//
|
||||
// TODO(mdempsky): Is it safe to have a single "later" slice or do
|
||||
// we need to have multiple passes? See comments on CL 386002 and
|
||||
// go.dev/issue/52104.
|
||||
tparams := r.dict.tparams
|
||||
r.p.later(func() {
|
||||
for i, typ := range typs {
|
||||
tparams[i].SetConstraint(typ)
|
||||
}
|
||||
})
|
||||
|
||||
return r.dict.tparams
|
||||
}
|
||||
|
||||
func (r *reader) method() *types.Func {
|
||||
r.Sync(pkgbits.SyncMethod)
|
||||
pos := r.pos()
|
||||
pkg, name := r.selector()
|
||||
|
||||
rparams := r.typeParamNames()
|
||||
sig := r.signature(r.param(), rparams, nil)
|
||||
|
||||
_ = r.pos() // TODO(mdempsky): Remove; this is a hacker for linker.go.
|
||||
return types.NewFunc(pos, pkg, name, sig)
|
||||
}
|
||||
|
||||
func (r *reader) qualifiedIdent() (*types.Package, string) { return r.ident(pkgbits.SyncSym) }
|
||||
func (r *reader) localIdent() (*types.Package, string) { return r.ident(pkgbits.SyncLocalIdent) }
|
||||
func (r *reader) selector() (*types.Package, string) { return r.ident(pkgbits.SyncSelector) }
|
||||
|
||||
func (r *reader) ident(marker pkgbits.SyncMarker) (*types.Package, string) {
|
||||
r.Sync(marker)
|
||||
return r.pkg(), r.String()
|
||||
}
|
||||
|
||||
// pkgScope returns pkg.Scope().
|
||||
// If pkg is nil, it returns types.Universe instead.
|
||||
//
|
||||
// TODO(mdempsky): Remove after x/tools can depend on Go 1.19.
|
||||
func pkgScope(pkg *types.Package) *types.Scope {
|
||||
if pkg != nil {
|
||||
return pkg.Scope()
|
||||
}
|
||||
return types.Universe
|
||||
}
|
77 vendor/golang.org/x/tools/go/internal/pkgbits/codes.go generated vendored Normal file
|
@ -0,0 +1,77 @@
|
|||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package pkgbits
|
||||
|
||||
// A Code is an enum value that can be encoded into bitstreams.
|
||||
//
|
||||
// Code types are preferable for enum types, because they allow
|
||||
// Decoder to detect desyncs.
|
||||
type Code interface {
|
||||
// Marker returns the SyncMarker for the Code's dynamic type.
|
||||
Marker() SyncMarker
|
||||
|
||||
// Value returns the Code's ordinal value.
|
||||
Value() int
|
||||
}
|
||||
|
||||
// A CodeVal distinguishes among go/constant.Value encodings.
|
||||
type CodeVal int
|
||||
|
||||
func (c CodeVal) Marker() SyncMarker { return SyncVal }
|
||||
func (c CodeVal) Value() int { return int(c) }
|
||||
|
||||
// Note: These values are public and cannot be changed without
|
||||
// updating the go/types importers.
|
||||
|
||||
const (
|
||||
ValBool CodeVal = iota
|
||||
ValString
|
||||
ValInt64
|
||||
ValBigInt
|
||||
ValBigRat
|
||||
ValBigFloat
|
||||
)
|
||||
|
||||
// A CodeType distinguishes among go/types.Type encodings.
|
||||
type CodeType int
|
||||
|
||||
func (c CodeType) Marker() SyncMarker { return SyncType }
|
||||
func (c CodeType) Value() int { return int(c) }
|
||||
|
||||
// Note: These values are public and cannot be changed without
|
||||
// updating the go/types importers.
|
||||
|
||||
const (
|
||||
TypeBasic CodeType = iota
|
||||
TypeNamed
|
||||
TypePointer
|
||||
TypeSlice
|
||||
TypeArray
|
||||
TypeChan
|
||||
TypeMap
|
||||
TypeSignature
|
||||
TypeStruct
|
||||
TypeInterface
|
||||
TypeUnion
|
||||
TypeTypeParam
|
||||
)
|
||||
|
||||
// A CodeObj distinguishes among go/types.Object encodings.
|
||||
type CodeObj int
|
||||
|
||||
func (c CodeObj) Marker() SyncMarker { return SyncCodeObj }
|
||||
func (c CodeObj) Value() int { return int(c) }
|
||||
|
||||
// Note: These values are public and cannot be changed without
|
||||
// updating the go/types importers.
|
||||
|
||||
const (
|
||||
ObjAlias CodeObj = iota
|
||||
ObjConst
|
||||
ObjType
|
||||
ObjFunc
|
||||
ObjVar
|
||||
ObjStub
|
||||
)
|
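codes.go introduces the small Code interface so every enum written to the bitstream can be cross-checked against a sync marker when it is read back. A self-contained miniature of the pattern, using local stand-ins rather than the real pkgbits types:

```go
package main

import "fmt"

// SyncMarker and Code mirror, in miniature, the pkgbits types above:
// an enum value knows which sync marker guards it and its ordinal value.
type SyncMarker int

const (
	SyncVal SyncMarker = iota
	SyncType
)

type Code interface {
	Marker() SyncMarker
	Value() int
}

// CodeVal distinguishes constant-value encodings, as in pkgbits.
type CodeVal int

func (c CodeVal) Marker() SyncMarker { return SyncVal }
func (c CodeVal) Value() int         { return int(c) }

const (
	ValBool CodeVal = iota
	ValString
	ValInt64
)

// checkCode is how a decoder might verify it is reading the enum it expects.
func checkCode(c Code, want SyncMarker) error {
	if c.Marker() != want {
		return fmt.Errorf("desync: got marker %v, want %v", c.Marker(), want)
	}
	return nil
}

func main() {
	fmt.Println(checkCode(ValString, SyncVal)) // <nil>
	fmt.Println(ValString.Value())             // 1
}
```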
433 vendor/golang.org/x/tools/go/internal/pkgbits/decoder.go generated vendored Normal file
|
@ -0,0 +1,433 @@
|
|||
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package pkgbits
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"go/constant"
|
||||
"go/token"
|
||||
"math/big"
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// A PkgDecoder provides methods for decoding a package's Unified IR
|
||||
// export data.
|
||||
type PkgDecoder struct {
|
||||
// version is the file format version.
|
||||
version uint32
|
||||
|
||||
// sync indicates whether the file uses sync markers.
|
||||
sync bool
|
||||
|
||||
// pkgPath is the package path for the package to be decoded.
|
||||
//
|
||||
// TODO(mdempsky): Remove; unneeded since CL 391014.
|
||||
pkgPath string
|
||||
|
||||
// elemData is the full data payload of the encoded package.
|
||||
// Elements are densely and contiguously packed together.
|
||||
//
|
||||
// The last 8 bytes of elemData are the package fingerprint.
|
||||
elemData string
|
||||
|
||||
// elemEnds stores the byte-offset end positions of element
|
||||
// bitstreams within elemData.
|
||||
//
|
||||
// For example, element I's bitstream data starts at elemEnds[I-1]
|
||||
// (or 0, if I==0) and ends at elemEnds[I].
|
||||
//
|
||||
// Note: elemEnds is indexed by absolute indices, not
|
||||
// section-relative indices.
|
||||
elemEnds []uint32
|
||||
|
||||
// elemEndsEnds stores the index-offset end positions of relocation
|
||||
// sections within elemEnds.
|
||||
//
|
||||
// For example, section K's end positions start at elemEndsEnds[K-1]
|
||||
// (or 0, if K==0) and end at elemEndsEnds[K].
|
||||
elemEndsEnds [numRelocs]uint32
|
||||
}
|
||||
|
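The comments on elemEnds and elemEndsEnds describe the offset scheme: element I's bitstream is elemData[elemEnds[I-1]:elemEnds[I]] (with an implicit 0 when I == 0), and sections are delimited the same way one level up. A tiny standalone sketch of that lookup on made-up data:

```go
package main

import "fmt"

// elemSlice returns the i'th element of data, where ends[i] is the byte
// offset at which element i ends — the same convention as PkgDecoder.elemEnds.
func elemSlice(data string, ends []uint32, i int) string {
	var start uint32
	if i > 0 {
		start = ends[i-1]
	}
	return data[start:ends[i]]
}

func main() {
	data := "aaabbcccc" // three elements packed contiguously
	ends := []uint32{3, 5, 9}
	for i := range ends {
		fmt.Printf("element %d: %q\n", i, elemSlice(data, ends, i))
	}
	// element 0: "aaa", element 1: "bb", element 2: "cccc"
}
```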
||||
// PkgPath returns the package path for the package
|
||||
//
|
||||
// TODO(mdempsky): Remove; unneeded since CL 391014.
|
||||
func (pr *PkgDecoder) PkgPath() string { return pr.pkgPath }
|
||||
|
||||
// SyncMarkers reports whether pr uses sync markers.
|
||||
func (pr *PkgDecoder) SyncMarkers() bool { return pr.sync }
|
||||
|
||||
// NewPkgDecoder returns a PkgDecoder initialized to read the Unified
|
||||
// IR export data from input. pkgPath is the package path for the
|
||||
// compilation unit that produced the export data.
|
||||
//
|
||||
// TODO(mdempsky): Remove pkgPath parameter; unneeded since CL 391014.
|
||||
func NewPkgDecoder(pkgPath, input string) PkgDecoder {
|
||||
pr := PkgDecoder{
|
||||
pkgPath: pkgPath,
|
||||
}
|
||||
|
||||
// TODO(mdempsky): Implement direct indexing of input string to
|
||||
// avoid copying the position information.
|
||||
|
||||
r := strings.NewReader(input)
|
||||
|
||||
assert(binary.Read(r, binary.LittleEndian, &pr.version) == nil)
|
||||
|
||||
switch pr.version {
|
||||
default:
|
||||
panic(fmt.Errorf("unsupported version: %v", pr.version))
|
||||
case 0:
|
||||
// no flags
|
||||
case 1:
|
||||
var flags uint32
|
||||
assert(binary.Read(r, binary.LittleEndian, &flags) == nil)
|
||||
pr.sync = flags&flagSyncMarkers != 0
|
||||
}
|
||||
|
||||
assert(binary.Read(r, binary.LittleEndian, pr.elemEndsEnds[:]) == nil)
|
||||
|
||||
pr.elemEnds = make([]uint32, pr.elemEndsEnds[len(pr.elemEndsEnds)-1])
|
||||
assert(binary.Read(r, binary.LittleEndian, pr.elemEnds[:]) == nil)
|
||||
|
||||
pos, err := r.Seek(0, os.SEEK_CUR)
|
||||
assert(err == nil)
|
||||
|
||||
pr.elemData = input[pos:]
|
||||
assert(len(pr.elemData)-8 == int(pr.elemEnds[len(pr.elemEnds)-1]))
|
||||
|
||||
return pr
|
||||
}
|
||||
|
||||
// NumElems returns the number of elements in section k.
|
||||
func (pr *PkgDecoder) NumElems(k RelocKind) int {
|
||||
count := int(pr.elemEndsEnds[k])
|
||||
if k > 0 {
|
||||
count -= int(pr.elemEndsEnds[k-1])
|
||||
}
|
||||
return count
|
||||
}
|
||||
|
||||
// TotalElems returns the total number of elements across all sections.
|
||||
func (pr *PkgDecoder) TotalElems() int {
|
||||
return len(pr.elemEnds)
|
||||
}
|
||||
|
||||
// Fingerprint returns the package fingerprint.
|
||||
func (pr *PkgDecoder) Fingerprint() [8]byte {
|
||||
var fp [8]byte
|
||||
copy(fp[:], pr.elemData[len(pr.elemData)-8:])
|
||||
return fp
|
||||
}
|
||||
|
||||
// AbsIdx returns the absolute index for the given (section, index)
|
||||
// pair.
|
||||
func (pr *PkgDecoder) AbsIdx(k RelocKind, idx Index) int {
|
||||
absIdx := int(idx)
|
||||
if k > 0 {
|
||||
absIdx += int(pr.elemEndsEnds[k-1])
|
||||
}
|
||||
if absIdx >= int(pr.elemEndsEnds[k]) {
|
||||
errorf("%v:%v is out of bounds; %v", k, idx, pr.elemEndsEnds)
|
||||
}
|
||||
return absIdx
|
||||
}
|
||||
|
||||
// DataIdx returns the raw element bitstream for the given (section,
|
||||
// index) pair.
|
||||
func (pr *PkgDecoder) DataIdx(k RelocKind, idx Index) string {
|
||||
absIdx := pr.AbsIdx(k, idx)
|
||||
|
||||
var start uint32
|
||||
if absIdx > 0 {
|
||||
start = pr.elemEnds[absIdx-1]
|
||||
}
|
||||
end := pr.elemEnds[absIdx]
|
||||
|
||||
return pr.elemData[start:end]
|
||||
}
|
||||
|
||||
// StringIdx returns the string value for the given string index.
|
||||
func (pr *PkgDecoder) StringIdx(idx Index) string {
|
||||
return pr.DataIdx(RelocString, idx)
|
||||
}
|
||||
|
||||
// NewDecoder returns a Decoder for the given (section, index) pair,
|
||||
// and decodes the given SyncMarker from the element bitstream.
|
||||
func (pr *PkgDecoder) NewDecoder(k RelocKind, idx Index, marker SyncMarker) Decoder {
|
||||
r := pr.NewDecoderRaw(k, idx)
|
||||
r.Sync(marker)
|
||||
return r
|
||||
}
|
||||
|
||||
// NewDecoderRaw returns a Decoder for the given (section, index) pair.
|
||||
//
|
||||
// Most callers should use NewDecoder instead.
|
||||
func (pr *PkgDecoder) NewDecoderRaw(k RelocKind, idx Index) Decoder {
|
||||
r := Decoder{
|
||||
common: pr,
|
||||
k: k,
|
||||
Idx: idx,
|
||||
}
|
||||
|
||||
// TODO(mdempsky) r.data.Reset(...) after #44505 is resolved.
|
||||
r.Data = *strings.NewReader(pr.DataIdx(k, idx))
|
||||
|
||||
r.Sync(SyncRelocs)
|
||||
r.Relocs = make([]RelocEnt, r.Len())
|
||||
for i := range r.Relocs {
|
||||
r.Sync(SyncReloc)
|
||||
r.Relocs[i] = RelocEnt{RelocKind(r.Len()), Index(r.Len())}
|
||||
}
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
// A Decoder provides methods for decoding an individual element's
|
||||
// bitstream data.
|
||||
type Decoder struct {
|
||||
common *PkgDecoder
|
||||
|
||||
Relocs []RelocEnt
|
||||
Data strings.Reader
|
||||
|
||||
k RelocKind
|
||||
Idx Index
|
||||
}
|
||||
|
||||
func (r *Decoder) checkErr(err error) {
|
||||
if err != nil {
|
||||
errorf("unexpected decoding error: %w", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Decoder) rawUvarint() uint64 {
|
||||
x, err := binary.ReadUvarint(&r.Data)
|
||||
r.checkErr(err)
|
||||
return x
|
||||
}
|
||||
|
||||
func (r *Decoder) rawVarint() int64 {
|
||||
ux := r.rawUvarint()
|
||||
|
||||
// Zig-zag decode.
|
||||
x := int64(ux >> 1)
|
||||
if ux&1 != 0 {
|
||||
x = ^x
|
||||
}
|
||||
return x
|
||||
}
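// Illustrative sketch (not part of the vendored file; the function name is
// a placeholder): round-tripping the zig-zag mapping used by rawVarint.
// Small magnitudes, positive or negative, become small unsigned values
// (0, -1, 1, -2, 2 -> 0, 1, 2, 3, 4), so they encode to short varints.
func exampleZigZag() {
	for _, x := range []int64{0, -1, 1, -2, 2, -64} {
		// Encode (mirrors Encoder.rawVarint).
		ux := uint64(x) << 1
		if x < 0 {
			ux = ^ux
		}
		// Decode (mirrors the function above).
		y := int64(ux >> 1)
		if ux&1 != 0 {
			y = ^y
		}
		fmt.Printf("%d -> %d -> %d\n", x, ux, y) // y always equals x
	}
}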
|
||||
|
||||
func (r *Decoder) rawReloc(k RelocKind, idx int) Index {
|
||||
e := r.Relocs[idx]
|
||||
assert(e.Kind == k)
|
||||
return e.Idx
|
||||
}
|
||||
|
||||
// Sync decodes a sync marker from the element bitstream and asserts
|
||||
// that it matches the expected marker.
|
||||
//
|
||||
// If r.common.sync is false, then Sync is a no-op.
|
||||
func (r *Decoder) Sync(mWant SyncMarker) {
|
||||
if !r.common.sync {
|
||||
return
|
||||
}
|
||||
|
||||
pos, _ := r.Data.Seek(0, os.SEEK_CUR) // TODO(mdempsky): io.SeekCurrent after #44505 is resolved
|
||||
mHave := SyncMarker(r.rawUvarint())
|
||||
writerPCs := make([]int, r.rawUvarint())
|
||||
for i := range writerPCs {
|
||||
writerPCs[i] = int(r.rawUvarint())
|
||||
}
|
||||
|
||||
if mHave == mWant {
|
||||
return
|
||||
}
|
||||
|
||||
// There's some tension here between printing:
|
||||
//
|
||||
// (1) full file paths that tools can recognize (e.g., so emacs
|
||||
// hyperlinks the "file:line" text for easy navigation), or
|
||||
//
|
||||
// (2) short file paths that are easier for humans to read (e.g., by
|
||||
// omitting redundant or irrelevant details, so it's easier to
|
||||
// focus on the useful bits that remain).
|
||||
//
|
||||
// The current formatting favors the former, as it seems more
|
||||
// helpful in practice. But perhaps the formatting could be improved
|
||||
// to better address both concerns. For example, use relative file
|
||||
// paths if they would be shorter, or rewrite file paths to contain
|
||||
// "$GOROOT" (like objabi.AbsFile does) if tools can be taught how
|
||||
// to reliably expand that again.
|
||||
|
||||
fmt.Printf("export data desync: package %q, section %v, index %v, offset %v\n", r.common.pkgPath, r.k, r.Idx, pos)
|
||||
|
||||
fmt.Printf("\nfound %v, written at:\n", mHave)
|
||||
if len(writerPCs) == 0 {
|
||||
fmt.Printf("\t[stack trace unavailable; recompile package %q with -d=syncframes]\n", r.common.pkgPath)
|
||||
}
|
||||
for _, pc := range writerPCs {
|
||||
fmt.Printf("\t%s\n", r.common.StringIdx(r.rawReloc(RelocString, pc)))
|
||||
}
|
||||
|
||||
fmt.Printf("\nexpected %v, reading at:\n", mWant)
|
||||
var readerPCs [32]uintptr // TODO(mdempsky): Dynamically size?
|
||||
n := runtime.Callers(2, readerPCs[:])
|
||||
for _, pc := range fmtFrames(readerPCs[:n]...) {
|
||||
fmt.Printf("\t%s\n", pc)
|
||||
}
|
||||
|
||||
// We already printed a stack trace for the reader, so now we can
|
||||
// simply exit. Printing a second one with panic or base.Fatalf
|
||||
// would just be noise.
|
||||
os.Exit(1)
|
||||
}
|
||||
|
||||
// Bool decodes and returns a bool value from the element bitstream.
|
||||
func (r *Decoder) Bool() bool {
|
||||
r.Sync(SyncBool)
|
||||
x, err := r.Data.ReadByte()
|
||||
r.checkErr(err)
|
||||
assert(x < 2)
|
||||
return x != 0
|
||||
}
|
||||
|
||||
// Int64 decodes and returns an int64 value from the element bitstream.
|
||||
func (r *Decoder) Int64() int64 {
|
||||
r.Sync(SyncInt64)
|
||||
return r.rawVarint()
|
||||
}
|
||||
|
||||
// Uint64 decodes and returns a uint64 value from the element bitstream.
|
||||
func (r *Decoder) Uint64() uint64 {
|
||||
r.Sync(SyncUint64)
|
||||
return r.rawUvarint()
|
||||
}
|
||||
|
||||
// Len decodes and returns a non-negative int value from the element bitstream.
|
||||
func (r *Decoder) Len() int { x := r.Uint64(); v := int(x); assert(uint64(v) == x); return v }
|
||||
|
||||
// Int decodes and returns an int value from the element bitstream.
|
||||
func (r *Decoder) Int() int { x := r.Int64(); v := int(x); assert(int64(v) == x); return v }
|
||||
|
||||
// Uint decodes and returns a uint value from the element bitstream.
|
||||
func (r *Decoder) Uint() uint { x := r.Uint64(); v := uint(x); assert(uint64(v) == x); return v }
|
||||
|
||||
// Code decodes a Code value from the element bitstream and returns
|
||||
// its ordinal value. It's the caller's responsibility to convert the
|
||||
// result to an appropriate Code type.
|
||||
//
|
||||
// TODO(mdempsky): Ideally this method would have signature "Code[T
|
||||
// Code] T" instead, but we don't allow generic methods and the
|
||||
// compiler can't depend on generics yet anyway.
|
||||
func (r *Decoder) Code(mark SyncMarker) int {
|
||||
r.Sync(mark)
|
||||
return r.Len()
|
||||
}
|
||||
|
||||
// Reloc decodes a relocation of expected section k from the element
|
||||
// bitstream and returns an index to the referenced element.
|
||||
func (r *Decoder) Reloc(k RelocKind) Index {
|
||||
r.Sync(SyncUseReloc)
|
||||
return r.rawReloc(k, r.Len())
|
||||
}
|
||||
|
||||
// String decodes and returns a string value from the element
|
||||
// bitstream.
|
||||
func (r *Decoder) String() string {
|
||||
r.Sync(SyncString)
|
||||
return r.common.StringIdx(r.Reloc(RelocString))
|
||||
}
|
||||
|
||||
// Strings decodes and returns a variable-length slice of strings from
|
||||
// the element bitstream.
|
||||
func (r *Decoder) Strings() []string {
|
||||
res := make([]string, r.Len())
|
||||
for i := range res {
|
||||
res[i] = r.String()
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
// Value decodes and returns a constant.Value from the element
|
||||
// bitstream.
|
||||
func (r *Decoder) Value() constant.Value {
|
||||
r.Sync(SyncValue)
|
||||
isComplex := r.Bool()
|
||||
val := r.scalar()
|
||||
if isComplex {
|
||||
val = constant.BinaryOp(val, token.ADD, constant.MakeImag(r.scalar()))
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
func (r *Decoder) scalar() constant.Value {
|
||||
switch tag := CodeVal(r.Code(SyncVal)); tag {
|
||||
default:
|
||||
panic(fmt.Errorf("unexpected scalar tag: %v", tag))
|
||||
|
||||
case ValBool:
|
||||
return constant.MakeBool(r.Bool())
|
||||
case ValString:
|
||||
return constant.MakeString(r.String())
|
||||
case ValInt64:
|
||||
return constant.MakeInt64(r.Int64())
|
||||
case ValBigInt:
|
||||
return constant.Make(r.bigInt())
|
||||
case ValBigRat:
|
||||
num := r.bigInt()
|
||||
denom := r.bigInt()
|
||||
return constant.Make(new(big.Rat).SetFrac(num, denom))
|
||||
case ValBigFloat:
|
||||
return constant.Make(r.bigFloat())
|
||||
}
|
||||
}
|
||||
|
||||
func (r *Decoder) bigInt() *big.Int {
|
||||
v := new(big.Int).SetBytes([]byte(r.String()))
|
||||
if r.Bool() {
|
||||
v.Neg(v)
|
||||
}
|
||||
return v
|
||||
}
|
||||
|
||||
func (r *Decoder) bigFloat() *big.Float {
|
||||
v := new(big.Float).SetPrec(512)
|
||||
assert(v.UnmarshalText([]byte(r.String())) == nil)
|
||||
return v
|
||||
}
|
||||
|
||||
// @@@ Helpers
|
||||
|
||||
// TODO(mdempsky): These should probably be removed. I think they're a
|
||||
// smell that the export data format is not yet quite right.
|
||||
|
||||
// PeekPkgPath returns the package path for the specified package
|
||||
// index.
|
||||
func (pr *PkgDecoder) PeekPkgPath(idx Index) string {
|
||||
r := pr.NewDecoder(RelocPkg, idx, SyncPkgDef)
|
||||
path := r.String()
|
||||
if path == "" {
|
||||
path = pr.pkgPath
|
||||
}
|
||||
return path
|
||||
}
|
||||
|
||||
// PeekObj returns the package path, object name, and CodeObj for the
|
||||
// specified object index.
|
||||
func (pr *PkgDecoder) PeekObj(idx Index) (string, string, CodeObj) {
|
||||
r := pr.NewDecoder(RelocName, idx, SyncObject1)
|
||||
r.Sync(SyncSym)
|
||||
r.Sync(SyncPkg)
|
||||
path := pr.PeekPkgPath(r.Reloc(RelocPkg))
|
||||
name := r.String()
|
||||
assert(name != "")
|
||||
|
||||
tag := CodeObj(r.Code(SyncCodeObj))
|
||||
|
||||
return path, name, tag
|
||||
}
32
vendor/golang.org/x/tools/go/internal/pkgbits/doc.go
generated
vendored
Normal file
@@ -0,0 +1,32 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// Package pkgbits implements low-level coding abstractions for
// Unified IR's export data format.
//
// At a low-level, a package is a collection of bitstream elements.
// Each element has a "kind" and a dense, non-negative index.
// Elements can be randomly accessed given their kind and index.
//
// Individual elements are sequences of variable-length values (e.g.,
// integers, booleans, strings, go/constant values, cross-references
// to other elements). Package pkgbits provides APIs for encoding and
// decoding these low-level values, but the details of mapping
// higher-level Go constructs into elements is left to higher-level
// abstractions.
//
// Elements may cross-reference each other with "relocations." For
// example, an element representing a pointer type has a relocation
// referring to the element type.
//
// Go constructs may be composed as a constellation of multiple
// elements. For example, a declared function may have one element to
// describe the object (e.g., its name, type, position), and a
// separate element to describe its function body. This allows readers
// some flexibility in efficiently seeking or re-reading data (e.g.,
// inlining requires re-reading the function body for each inlined
// call, without needing to re-read the object-level details).
//
// This is a copy of internal/pkgbits in the Go implementation.
package pkgbits
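// Illustrative sketch (not part of the vendored files; the function name and
// the package path "example.com/demo" are placeholders): a minimal
// encode/decode round trip using the PkgEncoder and PkgDecoder defined in
// encoder.go and decoder.go below. It assumes code living inside (or copied
// from) this internal package, with "bytes" and "fmt" imported.
func exampleRoundTrip() {
	pw := NewPkgEncoder(-1) // negative syncFrames: omit sync markers

	w := pw.NewEncoder(RelocMeta, SyncPublic)
	w.String("hello") // stored once in the string section, referenced by a relocation
	w.Flush()         // element 0 of the meta section, i.e. PublicRootIdx

	var buf bytes.Buffer
	pw.DumpTo(&buf) // version, flags, offset tables, element data, md5 fingerprint

	pr := NewPkgDecoder("example.com/demo", buf.String())
	r := pr.NewDecoder(RelocMeta, PublicRootIdx, SyncPublic)
	fmt.Println(r.String()) // prints "hello"
}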
379
vendor/golang.org/x/tools/go/internal/pkgbits/encoder.go
generated
vendored
Normal file
@@ -0,0 +1,379 @@
// Copyright 2021 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package pkgbits
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"encoding/binary"
|
||||
"go/constant"
|
||||
"io"
|
||||
"math/big"
|
||||
"runtime"
|
||||
)
|
||||
|
||||
// currentVersion is the current version number.
|
||||
//
|
||||
// - v0: initial prototype
|
||||
//
|
||||
// - v1: adds the flags uint32 word
|
||||
const currentVersion uint32 = 1
|
||||
|
||||
// A PkgEncoder provides methods for encoding a package's Unified IR
|
||||
// export data.
|
||||
type PkgEncoder struct {
|
||||
// elems holds the bitstream for previously encoded elements.
|
||||
elems [numRelocs][]string
|
||||
|
||||
// stringsIdx maps previously encoded strings to their index within
|
||||
// the RelocString section, to allow deduplication. That is,
|
||||
// elems[RelocString][stringsIdx[s]] == s (if present).
|
||||
stringsIdx map[string]Index
|
||||
|
||||
// syncFrames is the number of frames to write at each sync
|
||||
// marker. A negative value means sync markers are omitted.
|
||||
syncFrames int
|
||||
}
|
||||
|
||||
// SyncMarkers reports whether pw uses sync markers.
|
||||
func (pw *PkgEncoder) SyncMarkers() bool { return pw.syncFrames >= 0 }
|
||||
|
||||
// NewPkgEncoder returns an initialized PkgEncoder.
|
||||
//
|
||||
// syncFrames is the number of caller frames that should be serialized
|
||||
// at Sync points. Serializing additional frames results in larger
|
||||
// export data files, but can help diagnosing desync errors in
|
||||
// higher-level Unified IR reader/writer code. If syncFrames is
|
||||
// negative, then sync markers are omitted entirely.
|
||||
func NewPkgEncoder(syncFrames int) PkgEncoder {
|
||||
return PkgEncoder{
|
||||
stringsIdx: make(map[string]Index),
|
||||
syncFrames: syncFrames,
|
||||
}
|
||||
}
|
||||
|
||||
// DumpTo writes the package's encoded data to out0 and returns the
|
||||
// package fingerprint.
|
||||
func (pw *PkgEncoder) DumpTo(out0 io.Writer) (fingerprint [8]byte) {
|
||||
h := md5.New()
|
||||
out := io.MultiWriter(out0, h)
|
||||
|
||||
writeUint32 := func(x uint32) {
|
||||
assert(binary.Write(out, binary.LittleEndian, x) == nil)
|
||||
}
|
||||
|
||||
writeUint32(currentVersion)
|
||||
|
||||
var flags uint32
|
||||
if pw.SyncMarkers() {
|
||||
flags |= flagSyncMarkers
|
||||
}
|
||||
writeUint32(flags)
|
||||
|
||||
// Write elemEndsEnds.
|
||||
var sum uint32
|
||||
for _, elems := range &pw.elems {
|
||||
sum += uint32(len(elems))
|
||||
writeUint32(sum)
|
||||
}
|
||||
|
||||
// Write elemEnds.
|
||||
sum = 0
|
||||
for _, elems := range &pw.elems {
|
||||
for _, elem := range elems {
|
||||
sum += uint32(len(elem))
|
||||
writeUint32(sum)
|
||||
}
|
||||
}
|
||||
|
||||
// Write elemData.
|
||||
for _, elems := range &pw.elems {
|
||||
for _, elem := range elems {
|
||||
_, err := io.WriteString(out, elem)
|
||||
assert(err == nil)
|
||||
}
|
||||
}
|
||||
|
||||
// Write fingerprint.
|
||||
copy(fingerprint[:], h.Sum(nil))
|
||||
_, err := out0.Write(fingerprint[:])
|
||||
assert(err == nil)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// StringIdx adds a string value to the strings section, if not
|
||||
// already present, and returns its index.
|
||||
func (pw *PkgEncoder) StringIdx(s string) Index {
|
||||
if idx, ok := pw.stringsIdx[s]; ok {
|
||||
assert(pw.elems[RelocString][idx] == s)
|
||||
return idx
|
||||
}
|
||||
|
||||
idx := Index(len(pw.elems[RelocString]))
|
||||
pw.elems[RelocString] = append(pw.elems[RelocString], s)
|
||||
pw.stringsIdx[s] = idx
|
||||
return idx
|
||||
}
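// Illustrative sketch (not part of the vendored file; the function name is
// a placeholder): repeated strings share one entry in the string section,
// so elements that reference the same string only pay for a small
// relocation index.
func exampleStringDedup() {
	pw := NewPkgEncoder(-1)
	a := pw.StringIdx("hello") // 0: first occurrence appends to the section
	b := pw.StringIdx("world") // 1
	c := pw.StringIdx("hello") // 0 again: found in stringsIdx, no new entry
	_ = b
	assert(a == c) // assert is defined in support.go
}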
|
||||
|
||||
// NewEncoder returns an Encoder for a new element within the given
|
||||
// section, and encodes the given SyncMarker as the start of the
|
||||
// element bitstream.
|
||||
func (pw *PkgEncoder) NewEncoder(k RelocKind, marker SyncMarker) Encoder {
|
||||
e := pw.NewEncoderRaw(k)
|
||||
e.Sync(marker)
|
||||
return e
|
||||
}
|
||||
|
||||
// NewEncoderRaw returns an Encoder for a new element within the given
|
||||
// section.
|
||||
//
|
||||
// Most callers should use NewEncoder instead.
|
||||
func (pw *PkgEncoder) NewEncoderRaw(k RelocKind) Encoder {
|
||||
idx := Index(len(pw.elems[k]))
|
||||
pw.elems[k] = append(pw.elems[k], "") // placeholder
|
||||
|
||||
return Encoder{
|
||||
p: pw,
|
||||
k: k,
|
||||
Idx: idx,
|
||||
}
|
||||
}
|
||||
|
||||
// An Encoder provides methods for encoding an individual element's
|
||||
// bitstream data.
|
||||
type Encoder struct {
|
||||
p *PkgEncoder
|
||||
|
||||
Relocs []RelocEnt
|
||||
Data bytes.Buffer // accumulated element bitstream data
|
||||
|
||||
encodingRelocHeader bool
|
||||
|
||||
k RelocKind
|
||||
Idx Index // index within relocation section
|
||||
}
|
||||
|
||||
// Flush finalizes the element's bitstream and returns its Index.
|
||||
func (w *Encoder) Flush() Index {
|
||||
var sb bytes.Buffer // TODO(mdempsky): strings.Builder after #44505 is resolved
|
||||
|
||||
// Backup the data so we write the relocations at the front.
|
||||
var tmp bytes.Buffer
|
||||
io.Copy(&tmp, &w.Data)
|
||||
|
||||
// TODO(mdempsky): Consider writing these out separately so they're
|
||||
// easier to strip, along with function bodies, so that we can prune
|
||||
// down to just the data that's relevant to go/types.
|
||||
if w.encodingRelocHeader {
|
||||
panic("encodingRelocHeader already true; recursive flush?")
|
||||
}
|
||||
w.encodingRelocHeader = true
|
||||
w.Sync(SyncRelocs)
|
||||
w.Len(len(w.Relocs))
|
||||
for _, rEnt := range w.Relocs {
|
||||
w.Sync(SyncReloc)
|
||||
w.Len(int(rEnt.Kind))
|
||||
w.Len(int(rEnt.Idx))
|
||||
}
|
||||
|
||||
io.Copy(&sb, &w.Data)
|
||||
io.Copy(&sb, &tmp)
|
||||
w.p.elems[w.k][w.Idx] = sb.String()
|
||||
|
||||
return w.Idx
|
||||
}
|
||||
|
||||
func (w *Encoder) checkErr(err error) {
|
||||
if err != nil {
|
||||
errorf("unexpected encoding error: %v", err)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *Encoder) rawUvarint(x uint64) {
|
||||
var buf [binary.MaxVarintLen64]byte
|
||||
n := binary.PutUvarint(buf[:], x)
|
||||
_, err := w.Data.Write(buf[:n])
|
||||
w.checkErr(err)
|
||||
}
|
||||
|
||||
func (w *Encoder) rawVarint(x int64) {
|
||||
// Zig-zag encode.
|
||||
ux := uint64(x) << 1
|
||||
if x < 0 {
|
||||
ux = ^ux
|
||||
}
|
||||
|
||||
w.rawUvarint(ux)
|
||||
}
|
||||
|
||||
func (w *Encoder) rawReloc(r RelocKind, idx Index) int {
|
||||
// TODO(mdempsky): Use map for lookup; this takes quadratic time.
|
||||
for i, rEnt := range w.Relocs {
|
||||
if rEnt.Kind == r && rEnt.Idx == idx {
|
||||
return i
|
||||
}
|
||||
}
|
||||
|
||||
i := len(w.Relocs)
|
||||
w.Relocs = append(w.Relocs, RelocEnt{r, idx})
|
||||
return i
|
||||
}
|
||||
|
||||
func (w *Encoder) Sync(m SyncMarker) {
|
||||
if !w.p.SyncMarkers() {
|
||||
return
|
||||
}
|
||||
|
||||
// Writing out stack frame string references requires working
|
||||
// relocations, but writing out the relocations themselves involves
|
||||
// sync markers. To prevent infinite recursion, we simply trim the
|
||||
// stack frame for sync markers within the relocation header.
|
||||
var frames []string
|
||||
if !w.encodingRelocHeader && w.p.syncFrames > 0 {
|
||||
pcs := make([]uintptr, w.p.syncFrames)
|
||||
n := runtime.Callers(2, pcs)
|
||||
frames = fmtFrames(pcs[:n]...)
|
||||
}
|
||||
|
||||
// TODO(mdempsky): Save space by writing out stack frames as a
|
||||
// linked list so we can share common stack frames.
|
||||
w.rawUvarint(uint64(m))
|
||||
w.rawUvarint(uint64(len(frames)))
|
||||
for _, frame := range frames {
|
||||
w.rawUvarint(uint64(w.rawReloc(RelocString, w.p.StringIdx(frame))))
|
||||
}
|
||||
}
|
||||
|
||||
// Bool encodes and writes a bool value into the element bitstream,
|
||||
// and then returns the bool value.
|
||||
//
|
||||
// For simple, 2-alternative encodings, the idiomatic way to call Bool
|
||||
// is something like:
|
||||
//
|
||||
// if w.Bool(x != 0) {
|
||||
// // alternative #1
|
||||
// } else {
|
||||
// // alternative #2
|
||||
// }
|
||||
//
|
||||
// For multi-alternative encodings, use Code instead.
|
||||
func (w *Encoder) Bool(b bool) bool {
|
||||
w.Sync(SyncBool)
|
||||
var x byte
|
||||
if b {
|
||||
x = 1
|
||||
}
|
||||
err := w.Data.WriteByte(x)
|
||||
w.checkErr(err)
|
||||
return b
|
||||
}
|
||||
|
||||
// Int64 encodes and writes an int64 value into the element bitstream.
|
||||
func (w *Encoder) Int64(x int64) {
|
||||
w.Sync(SyncInt64)
|
||||
w.rawVarint(x)
|
||||
}
|
||||
|
||||
// Uint64 encodes and writes a uint64 value into the element bitstream.
|
||||
func (w *Encoder) Uint64(x uint64) {
|
||||
w.Sync(SyncUint64)
|
||||
w.rawUvarint(x)
|
||||
}
|
||||
|
||||
// Len encodes and writes a non-negative int value into the element bitstream.
|
||||
func (w *Encoder) Len(x int) { assert(x >= 0); w.Uint64(uint64(x)) }
|
||||
|
||||
// Int encodes and writes an int value into the element bitstream.
|
||||
func (w *Encoder) Int(x int) { w.Int64(int64(x)) }
|
||||
|
||||
// Uint encodes and writes a uint value into the element bitstream.
|
||||
func (w *Encoder) Uint(x uint) { w.Uint64(uint64(x)) }
|
||||
|
||||
// Reloc encodes and writes a relocation for the given (section,
|
||||
// index) pair into the element bitstream.
|
||||
//
|
||||
// Note: Only the index is formally written into the element
|
||||
// bitstream, so bitstream decoders must know from context which
|
||||
// section an encoded relocation refers to.
|
||||
func (w *Encoder) Reloc(r RelocKind, idx Index) {
|
||||
w.Sync(SyncUseReloc)
|
||||
w.Len(w.rawReloc(r, idx))
|
||||
}
|
||||
|
||||
// Code encodes and writes a Code value into the element bitstream.
|
||||
func (w *Encoder) Code(c Code) {
|
||||
w.Sync(c.Marker())
|
||||
w.Len(c.Value())
|
||||
}
|
||||
|
||||
// String encodes and writes a string value into the element
|
||||
// bitstream.
|
||||
//
|
||||
// Internally, strings are deduplicated by adding them to the strings
|
||||
// section (if not already present), and then writing a relocation
|
||||
// into the element bitstream.
|
||||
func (w *Encoder) String(s string) {
|
||||
w.Sync(SyncString)
|
||||
w.Reloc(RelocString, w.p.StringIdx(s))
|
||||
}
|
||||
|
||||
// Strings encodes and writes a variable-length slice of strings into
|
||||
// the element bitstream.
|
||||
func (w *Encoder) Strings(ss []string) {
|
||||
w.Len(len(ss))
|
||||
for _, s := range ss {
|
||||
w.String(s)
|
||||
}
|
||||
}
|
||||
|
||||
// Value encodes and writes a constant.Value into the element
|
||||
// bitstream.
|
||||
func (w *Encoder) Value(val constant.Value) {
|
||||
w.Sync(SyncValue)
|
||||
if w.Bool(val.Kind() == constant.Complex) {
|
||||
w.scalar(constant.Real(val))
|
||||
w.scalar(constant.Imag(val))
|
||||
} else {
|
||||
w.scalar(val)
|
||||
}
|
||||
}
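// Illustrative sketch (not part of the vendored file; the function name is a
// placeholder, and it assumes a matched Encoder w and Decoder r for the same
// element): a complex constant is written as a bool flag plus two scalars
// (real and imaginary parts), while any other constant is a single scalar.
func exampleValueRoundTrip(w *Encoder, r *Decoder) {
	w.Value(constant.MakeImag(constant.MakeFloat64(2))) // 2i: flag true, then the 0 and 2 parts
	w.Value(constant.MakeInt64(7))                      // flag false, then one int64 scalar

	got := r.Value() // rebuilds 2i via constant.BinaryOp with token.ADD
	_ = got
	_ = r.Value() // 7
}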
|
||||
|
||||
func (w *Encoder) scalar(val constant.Value) {
|
||||
switch v := constant.Val(val).(type) {
|
||||
default:
|
||||
errorf("unhandled %v (%v)", val, val.Kind())
|
||||
case bool:
|
||||
w.Code(ValBool)
|
||||
w.Bool(v)
|
||||
case string:
|
||||
w.Code(ValString)
|
||||
w.String(v)
|
||||
case int64:
|
||||
w.Code(ValInt64)
|
||||
w.Int64(v)
|
||||
case *big.Int:
|
||||
w.Code(ValBigInt)
|
||||
w.bigInt(v)
|
||||
case *big.Rat:
|
||||
w.Code(ValBigRat)
|
||||
w.bigInt(v.Num())
|
||||
w.bigInt(v.Denom())
|
||||
case *big.Float:
|
||||
w.Code(ValBigFloat)
|
||||
w.bigFloat(v)
|
||||
}
|
||||
}
|
||||
|
||||
func (w *Encoder) bigInt(v *big.Int) {
|
||||
b := v.Bytes()
|
||||
w.String(string(b)) // TODO: More efficient encoding.
|
||||
w.Bool(v.Sign() < 0)
|
||||
}
|
||||
|
||||
func (w *Encoder) bigFloat(v *big.Float) {
|
||||
b := v.Append(nil, 'p', -1)
|
||||
w.String(string(b)) // TODO: More efficient encoding.
|
||||
}
9
vendor/golang.org/x/tools/go/internal/pkgbits/flags.go
generated
vendored
Normal file
@@ -0,0 +1,9 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pkgbits

const (
	flagSyncMarkers = 1 << iota // file format contains sync markers
)
21
vendor/golang.org/x/tools/go/internal/pkgbits/frames_go1.go
generated
vendored
Normal file
@@ -0,0 +1,21 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build !go1.7
// +build !go1.7

// TODO(mdempsky): Remove after #44505 is resolved

package pkgbits

import "runtime"

func walkFrames(pcs []uintptr, visit frameVisitor) {
	for _, pc := range pcs {
		fn := runtime.FuncForPC(pc)
		file, line := fn.FileLine(pc)

		visit(file, line, fn.Name(), pc-fn.Entry())
	}
}
28
vendor/golang.org/x/tools/go/internal/pkgbits/frames_go17.go
generated
vendored
Normal file
@@ -0,0 +1,28 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

//go:build go1.7
// +build go1.7

package pkgbits

import "runtime"

// walkFrames calls visit for each call frame represented by pcs.
//
// pcs should be a slice of PCs, as returned by runtime.Callers.
func walkFrames(pcs []uintptr, visit frameVisitor) {
	if len(pcs) == 0 {
		return
	}

	frames := runtime.CallersFrames(pcs)
	for {
		frame, more := frames.Next()
		visit(frame.File, frame.Line, frame.Function, frame.PC-frame.Entry)
		if !more {
			return
		}
	}
}
42
vendor/golang.org/x/tools/go/internal/pkgbits/reloc.go
generated
vendored
Normal file
@@ -0,0 +1,42 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pkgbits

// A RelocKind indicates a particular section within a unified IR export.
type RelocKind int

// An Index represents a bitstream element index within a particular
// section.
type Index int

// A relocEnt (relocation entry) is an entry in an element's local
// reference table.
//
// TODO(mdempsky): Rename this too.
type RelocEnt struct {
	Kind RelocKind
	Idx  Index
}

// Reserved indices within the meta relocation section.
const (
	PublicRootIdx  Index = 0
	PrivateRootIdx Index = 1
)

const (
	RelocString RelocKind = iota
	RelocMeta
	RelocPosBase
	RelocPkg
	RelocName
	RelocType
	RelocObj
	RelocObjExt
	RelocObjDict
	RelocBody

	numRelocs = iota
)
17
vendor/golang.org/x/tools/go/internal/pkgbits/support.go
generated
vendored
Normal file
@@ -0,0 +1,17 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pkgbits

import "fmt"

func assert(b bool) {
	if !b {
		panic("assertion failed")
	}
}

func errorf(format string, args ...interface{}) {
	panic(fmt.Errorf(format, args...))
}
113
vendor/golang.org/x/tools/go/internal/pkgbits/sync.go
generated
vendored
Normal file
@@ -0,0 +1,113 @@
// Copyright 2021 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package pkgbits

import (
	"fmt"
	"strings"
)

// fmtFrames formats a backtrace for reporting reader/writer desyncs.
func fmtFrames(pcs ...uintptr) []string {
	res := make([]string, 0, len(pcs))
	walkFrames(pcs, func(file string, line int, name string, offset uintptr) {
		// Trim package from function name. It's just redundant noise.
		name = strings.TrimPrefix(name, "cmd/compile/internal/noder.")

		res = append(res, fmt.Sprintf("%s:%v: %s +0x%v", file, line, name, offset))
	})
	return res
}

type frameVisitor func(file string, line int, name string, offset uintptr)

// SyncMarker is an enum type that represents markers that may be
// written to export data to ensure the reader and writer stay
// synchronized.
type SyncMarker int

//go:generate stringer -type=SyncMarker -trimprefix=Sync

const (
	_ SyncMarker = iota

	// Public markers (known to go/types importers).

	// Low-level coding markers.
	SyncEOF
	SyncBool
	SyncInt64
	SyncUint64
	SyncString
	SyncValue
	SyncVal
	SyncRelocs
	SyncReloc
	SyncUseReloc

	// Higher-level object and type markers.
	SyncPublic
	SyncPos
	SyncPosBase
	SyncObject
	SyncObject1
	SyncPkg
	SyncPkgDef
	SyncMethod
	SyncType
	SyncTypeIdx
	SyncTypeParamNames
	SyncSignature
	SyncParams
	SyncParam
	SyncCodeObj
	SyncSym
	SyncLocalIdent
	SyncSelector

	// Private markers (only known to cmd/compile).
	SyncPrivate

	SyncFuncExt
	SyncVarExt
	SyncTypeExt
	SyncPragma

	SyncExprList
	SyncExprs
	SyncExpr
	SyncExprType
	SyncAssign
	SyncOp
	SyncFuncLit
	SyncCompLit

	SyncDecl
	SyncFuncBody
	SyncOpenScope
	SyncCloseScope
	SyncCloseAnotherScope
	SyncDeclNames
	SyncDeclName

	SyncStmts
	SyncBlockStmt
	SyncIfStmt
	SyncForStmt
	SyncSwitchStmt
	SyncRangeStmt
	SyncCaseClause
	SyncCommClause
	SyncSelectStmt
	SyncDecls
	SyncLabeledStmt
	SyncUseObjLocal
	SyncAddLocal
	SyncLinkname
	SyncStmt1
	SyncStmtsEnd
	SyncLabel
	SyncOptLabel
)
89
vendor/golang.org/x/tools/go/internal/pkgbits/syncmarker_string.go
generated
vendored
Normal file
@@ -0,0 +1,89 @@
// Code generated by "stringer -type=SyncMarker -trimprefix=Sync"; DO NOT EDIT.

package pkgbits

import "strconv"

func _() {
	// An "invalid array index" compiler error signifies that the constant values have changed.
	// Re-run the stringer command to generate them again.
	var x [1]struct{}
	_ = x[SyncEOF-1]
	_ = x[SyncBool-2]
	_ = x[SyncInt64-3]
	_ = x[SyncUint64-4]
	_ = x[SyncString-5]
	_ = x[SyncValue-6]
	_ = x[SyncVal-7]
	_ = x[SyncRelocs-8]
	_ = x[SyncReloc-9]
	_ = x[SyncUseReloc-10]
	_ = x[SyncPublic-11]
	_ = x[SyncPos-12]
	_ = x[SyncPosBase-13]
	_ = x[SyncObject-14]
	_ = x[SyncObject1-15]
	_ = x[SyncPkg-16]
	_ = x[SyncPkgDef-17]
	_ = x[SyncMethod-18]
	_ = x[SyncType-19]
	_ = x[SyncTypeIdx-20]
	_ = x[SyncTypeParamNames-21]
	_ = x[SyncSignature-22]
	_ = x[SyncParams-23]
	_ = x[SyncParam-24]
	_ = x[SyncCodeObj-25]
	_ = x[SyncSym-26]
	_ = x[SyncLocalIdent-27]
	_ = x[SyncSelector-28]
	_ = x[SyncPrivate-29]
	_ = x[SyncFuncExt-30]
	_ = x[SyncVarExt-31]
	_ = x[SyncTypeExt-32]
	_ = x[SyncPragma-33]
	_ = x[SyncExprList-34]
	_ = x[SyncExprs-35]
	_ = x[SyncExpr-36]
	_ = x[SyncExprType-37]
	_ = x[SyncAssign-38]
	_ = x[SyncOp-39]
	_ = x[SyncFuncLit-40]
	_ = x[SyncCompLit-41]
	_ = x[SyncDecl-42]
	_ = x[SyncFuncBody-43]
	_ = x[SyncOpenScope-44]
	_ = x[SyncCloseScope-45]
	_ = x[SyncCloseAnotherScope-46]
	_ = x[SyncDeclNames-47]
	_ = x[SyncDeclName-48]
	_ = x[SyncStmts-49]
	_ = x[SyncBlockStmt-50]
	_ = x[SyncIfStmt-51]
	_ = x[SyncForStmt-52]
	_ = x[SyncSwitchStmt-53]
	_ = x[SyncRangeStmt-54]
	_ = x[SyncCaseClause-55]
	_ = x[SyncCommClause-56]
	_ = x[SyncSelectStmt-57]
	_ = x[SyncDecls-58]
	_ = x[SyncLabeledStmt-59]
	_ = x[SyncUseObjLocal-60]
	_ = x[SyncAddLocal-61]
	_ = x[SyncLinkname-62]
	_ = x[SyncStmt1-63]
	_ = x[SyncStmtsEnd-64]
	_ = x[SyncLabel-65]
	_ = x[SyncOptLabel-66]
}

const _SyncMarker_name = "EOFBoolInt64Uint64StringValueValRelocsRelocUseRelocPublicPosPosBaseObjectObject1PkgPkgDefMethodTypeTypeIdxTypeParamNamesSignatureParamsParamCodeObjSymLocalIdentSelectorPrivateFuncExtVarExtTypeExtPragmaExprListExprsExprExprTypeAssignOpFuncLitCompLitDeclFuncBodyOpenScopeCloseScopeCloseAnotherScopeDeclNamesDeclNameStmtsBlockStmtIfStmtForStmtSwitchStmtRangeStmtCaseClauseCommClauseSelectStmtDeclsLabeledStmtUseObjLocalAddLocalLinknameStmt1StmtsEndLabelOptLabel"

var _SyncMarker_index = [...]uint16{0, 3, 7, 12, 18, 24, 29, 32, 38, 43, 51, 57, 60, 67, 73, 80, 83, 89, 95, 99, 106, 120, 129, 135, 140, 147, 150, 160, 168, 175, 182, 188, 195, 201, 209, 214, 218, 226, 232, 234, 241, 248, 252, 260, 269, 279, 296, 305, 313, 318, 327, 333, 340, 350, 359, 369, 379, 389, 394, 405, 416, 424, 432, 437, 445, 450, 458}

func (i SyncMarker) String() string {
	i -= 1
	if i < 0 || i >= SyncMarker(len(_SyncMarker_index)-1) {
		return "SyncMarker(" + strconv.FormatInt(int64(i+1), 10) + ")"
	}
	return _SyncMarker_name[_SyncMarker_index[i]:_SyncMarker_index[i+1]]
}
42
vendor/golang.org/x/tools/go/loader/doc.go
generated
vendored
@@ -20,36 +20,35 @@
// be called any number of times. Finally, these are followed by a
|
||||
// call to Load() to actually load and type-check the program.
|
||||
//
|
||||
// var conf loader.Config
|
||||
// var conf loader.Config
|
||||
//
|
||||
// // Use the command-line arguments to specify
|
||||
// // a set of initial packages to load from source.
|
||||
// // See FromArgsUsage for help.
|
||||
// rest, err := conf.FromArgs(os.Args[1:], wantTests)
|
||||
// // Use the command-line arguments to specify
|
||||
// // a set of initial packages to load from source.
|
||||
// // See FromArgsUsage for help.
|
||||
// rest, err := conf.FromArgs(os.Args[1:], wantTests)
|
||||
//
|
||||
// // Parse the specified files and create an ad hoc package with path "foo".
|
||||
// // All files must have the same 'package' declaration.
|
||||
// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
|
||||
// // Parse the specified files and create an ad hoc package with path "foo".
|
||||
// // All files must have the same 'package' declaration.
|
||||
// conf.CreateFromFilenames("foo", "foo.go", "bar.go")
|
||||
//
|
||||
// // Create an ad hoc package with path "foo" from
|
||||
// // the specified already-parsed files.
|
||||
// // All ASTs must have the same 'package' declaration.
|
||||
// conf.CreateFromFiles("foo", parsedFiles)
|
||||
// // Create an ad hoc package with path "foo" from
|
||||
// // the specified already-parsed files.
|
||||
// // All ASTs must have the same 'package' declaration.
|
||||
// conf.CreateFromFiles("foo", parsedFiles)
|
||||
//
|
||||
// // Add "runtime" to the set of packages to be loaded.
|
||||
// conf.Import("runtime")
|
||||
// // Add "runtime" to the set of packages to be loaded.
|
||||
// conf.Import("runtime")
|
||||
//
|
||||
// // Adds "fmt" and "fmt_test" to the set of packages
|
||||
// // to be loaded. "fmt" will include *_test.go files.
|
||||
// conf.ImportWithTests("fmt")
|
||||
// // Adds "fmt" and "fmt_test" to the set of packages
|
||||
// // to be loaded. "fmt" will include *_test.go files.
|
||||
// conf.ImportWithTests("fmt")
|
||||
//
|
||||
// // Finally, load all the packages specified by the configuration.
|
||||
// prog, err := conf.Load()
|
||||
// // Finally, load all the packages specified by the configuration.
|
||||
// prog, err := conf.Load()
|
||||
//
|
||||
// See examples_test.go for examples of API usage.
|
||||
//
|
||||
//
|
||||
// CONCEPTS AND TERMINOLOGY
|
||||
// # CONCEPTS AND TERMINOLOGY
|
||||
//
|
||||
// The WORKSPACE is the set of packages accessible to the loader. The
|
||||
// workspace is defined by Config.Build, a *build.Context. The
|
||||
|
@ -92,7 +91,6 @@
|
|||
// The INITIAL packages are those specified in the configuration. A
|
||||
// DEPENDENCY is a package loaded to satisfy an import in an initial
|
||||
// package or another dependency.
|
||||
//
|
||||
package loader
|
||||
|
||||
// IMPLEMENTATION NOTES
|
||||
|
|
23
vendor/golang.org/x/tools/go/loader/loader.go
generated
vendored
@@ -23,6 +23,7 @@ import (
|
||||
"golang.org/x/tools/go/ast/astutil"
|
||||
"golang.org/x/tools/go/internal/cgo"
|
||||
"golang.org/x/tools/internal/typeparams"
|
||||
)
|
||||
|
||||
var ignoreVendor build.ImportMode
|
||||
|
@ -178,7 +179,6 @@ type Program struct {
|
|||
// for a single package.
|
||||
//
|
||||
// Not mutated once exposed via the API.
|
||||
//
|
||||
type PackageInfo struct {
|
||||
Pkg *types.Package
|
||||
Importable bool // true if 'import "Pkg.Path()"' would resolve to this
|
||||
|
@ -216,7 +216,6 @@ func (conf *Config) fset() *token.FileSet {
|
|||
// src specifies the parser input as a string, []byte, or io.Reader, and
|
||||
// filename is its apparent name. If src is nil, the contents of
|
||||
// filename are read from the file system.
|
||||
//
|
||||
func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) {
|
||||
// TODO(adonovan): use conf.build() etc like parseFiles does.
|
||||
return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode)
|
||||
|
@ -261,7 +260,6 @@ A '--' argument terminates the list of packages.
|
|||
//
|
||||
// Only superficial errors are reported at this stage; errors dependent
|
||||
// on I/O are detected during Load.
|
||||
//
|
||||
func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
|
||||
var rest []string
|
||||
for i, arg := range args {
|
||||
|
@ -299,14 +297,12 @@ func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) {
|
|||
// CreateFromFilenames is a convenience function that adds
|
||||
// a conf.CreatePkgs entry to create a package of the specified *.go
|
||||
// files.
|
||||
//
|
||||
func (conf *Config) CreateFromFilenames(path string, filenames ...string) {
|
||||
conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames})
|
||||
}
|
||||
|
||||
// CreateFromFiles is a convenience function that adds a conf.CreatePkgs
|
||||
// entry to create package of the specified path and parsed files.
|
||||
//
|
||||
func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
|
||||
conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files})
|
||||
}
|
||||
|
@ -320,12 +316,10 @@ func (conf *Config) CreateFromFiles(path string, files ...*ast.File) {
|
|||
// In addition, if any *_test.go files contain a "package x_test"
|
||||
// declaration, an additional package comprising just those files will
|
||||
// be added to CreatePkgs.
|
||||
//
|
||||
func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) }
|
||||
|
||||
// Import is a convenience function that adds path to ImportPkgs, the
|
||||
// set of initial packages that will be imported from source.
|
||||
//
|
||||
func (conf *Config) Import(path string) { conf.addImport(path, false) }
|
||||
|
||||
func (conf *Config) addImport(path string, tests bool) {
|
||||
|
@ -344,7 +338,6 @@ func (conf *Config) addImport(path string, tests bool) {
|
|||
// exact is defined as for astutil.PathEnclosingInterval.
|
||||
//
|
||||
// The zero value is returned if not found.
|
||||
//
|
||||
func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) {
|
||||
for _, info := range prog.AllPackages {
|
||||
for _, f := range info.Files {
|
||||
|
@ -367,7 +360,6 @@ func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageIn
|
|||
|
||||
// InitialPackages returns a new slice containing the set of initial
|
||||
// packages (Created + Imported) in unspecified order.
|
||||
//
|
||||
func (prog *Program) InitialPackages() []*PackageInfo {
|
||||
infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported))
|
||||
infos = append(infos, prog.Created...)
|
||||
|
@ -434,7 +426,6 @@ type findpkgValue struct {
|
|||
// Upon completion, exactly one of info and err is non-nil:
|
||||
// info on successful creation of a package, err otherwise.
|
||||
// A successful package may still contain type errors.
|
||||
//
|
||||
type importInfo struct {
|
||||
path string // import path
|
||||
info *PackageInfo // results of typechecking (including errors)
|
||||
|
@ -474,7 +465,6 @@ type importError struct {
|
|||
// false, Load will fail if any package had an error.
|
||||
//
|
||||
// It is an error if no packages were loaded.
|
||||
//
|
||||
func (conf *Config) Load() (*Program, error) {
|
||||
// Create a simple default error handler for parse/type errors.
|
||||
if conf.TypeChecker.Error == nil {
|
||||
|
@ -731,10 +721,10 @@ func (conf *Config) build() *build.Context {
|
|||
// errors that were encountered.
|
||||
//
|
||||
// 'which' indicates which files to include:
|
||||
// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
|
||||
// 't': include in-package *_test.go source files (TestGoFiles)
|
||||
// 'x': include external *_test.go source files. (XTestGoFiles)
|
||||
//
|
||||
// 'g': include non-test *.go source files (GoFiles + processed CgoFiles)
|
||||
// 't': include in-package *_test.go source files (TestGoFiles)
|
||||
// 'x': include external *_test.go source files. (XTestGoFiles)
|
||||
func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) {
|
||||
if bp.ImportPath == "unsafe" {
|
||||
return nil, nil
|
||||
|
@ -775,7 +765,6 @@ func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.Fil
|
|||
// in the package's PackageInfo).
|
||||
//
|
||||
// Idempotent.
|
||||
//
|
||||
func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
|
||||
if to == "C" {
|
||||
// This should be unreachable, but ad hoc packages are
|
||||
|
@ -867,7 +856,6 @@ func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMo
|
|||
//
|
||||
// fromDir is the directory containing the import declaration that
|
||||
// caused these imports.
|
||||
//
|
||||
func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
|
||||
if fromPath != "" {
|
||||
// We're loading a set of imports.
|
||||
|
@ -950,7 +938,6 @@ func (imp *importer) findPath(from, to string) []string {
|
|||
// caller must call awaitCompletion() before accessing its info field.
|
||||
//
|
||||
// startLoad is concurrency-safe and idempotent.
|
||||
//
|
||||
func (imp *importer) startLoad(bp *build.Package) *importInfo {
|
||||
path := bp.ImportPath
|
||||
imp.importedMu.Lock()
|
||||
|
@ -994,7 +981,6 @@ func (imp *importer) load(bp *build.Package) *PackageInfo {
|
|||
//
|
||||
// cycleCheck determines whether the imports within files create
|
||||
// dependency edges that should be checked for potential cycles.
|
||||
//
|
||||
func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) {
|
||||
// Ensure the dependencies are loaded, in parallel.
|
||||
var fromPath string
|
||||
|
@ -1053,6 +1039,7 @@ func (imp *importer) newPackageInfo(path, dir string) *PackageInfo {
|
|||
errorFunc: imp.conf.TypeChecker.Error,
|
||||
dir: dir,
|
||||
}
|
||||
typeparams.InitInstanceInfo(&info.Info)
|
||||
|
||||
// Copy the types.Config so we can vary it across PackageInfos.
|
||||
tc := imp.conf.TypeChecker
|
||||
|
|
1
vendor/golang.org/x/tools/go/loader/util.go
generated
vendored
@@ -27,7 +27,6 @@ var ioLimit = make(chan bool, 10)
//
|
||||
// I/O is done via ctxt, which may specify a virtual file system.
|
||||
// displayPath is used to transform the filenames attached to the ASTs.
|
||||
//
|
||||
func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
|
||||
if displayPath == nil {
|
||||
displayPath = func(path string) string { return path }
|
||||
|
|
1
vendor/golang.org/x/tools/go/packages/doc.go
generated
vendored
@@ -67,7 +67,6 @@ Most tools should pass their command-line arguments (after any flags)
uninterpreted to the loader, so that the loader can interpret them
|
||||
according to the conventions of the underlying build system.
|
||||
See the Example function for typical usage.
|
||||
|
||||
*/
|
||||
package packages // import "golang.org/x/tools/go/packages"
|
||||
|
||||
|
|
92
vendor/golang.org/x/tools/go/packages/golist.go
generated
vendored
@@ -26,7 +26,6 @@ import (
"golang.org/x/tools/go/internal/packagesdriver"
|
||||
"golang.org/x/tools/internal/gocommand"
|
||||
"golang.org/x/tools/internal/packagesinternal"
|
||||
"golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
// debug controls verbose logging.
|
||||
|
@ -303,11 +302,12 @@ func (state *golistState) runContainsQueries(response *responseDeduper, queries
|
|||
}
|
||||
dirResponse, err := state.createDriverResponse(pattern)
|
||||
|
||||
// If there was an error loading the package, or the package is returned
|
||||
// with errors, try to load the file as an ad-hoc package.
|
||||
// If there was an error loading the package, or no packages are returned,
|
||||
// or the package is returned with errors, try to load the file as an
|
||||
// ad-hoc package.
|
||||
// Usually the error will appear in a returned package, but may not if we're
|
||||
// in module mode and the ad-hoc is located outside a module.
|
||||
if err != nil || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
|
||||
if err != nil || len(dirResponse.Packages) == 0 || len(dirResponse.Packages) == 1 && len(dirResponse.Packages[0].GoFiles) == 0 &&
|
||||
len(dirResponse.Packages[0].Errors) == 1 {
|
||||
var queryErr error
|
||||
if dirResponse, queryErr = state.adhocPackage(pattern, query); queryErr != nil {
|
||||
|
@ -393,6 +393,8 @@ type jsonPackage struct {
|
|||
CompiledGoFiles []string
|
||||
IgnoredGoFiles []string
|
||||
IgnoredOtherFiles []string
|
||||
EmbedPatterns []string
|
||||
EmbedFiles []string
|
||||
CFiles []string
|
||||
CgoFiles []string
|
||||
CXXFiles []string
|
||||
|
@ -444,7 +446,11 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
|
|||
|
||||
// Run "go list" for complete
|
||||
// information on the specified packages.
|
||||
buf, err := state.invokeGo("list", golistargs(state.cfg, words)...)
|
||||
goVersion, err := state.getGoVersion()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
buf, err := state.invokeGo("list", golistargs(state.cfg, words, goVersion)...)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
@ -565,6 +571,8 @@ func (state *golistState) createDriverResponse(words ...string) (*driverResponse
|
|||
GoFiles: absJoin(p.Dir, p.GoFiles, p.CgoFiles),
|
||||
CompiledGoFiles: absJoin(p.Dir, p.CompiledGoFiles),
|
||||
OtherFiles: absJoin(p.Dir, otherFiles(p)...),
|
||||
EmbedFiles: absJoin(p.Dir, p.EmbedFiles),
|
||||
EmbedPatterns: absJoin(p.Dir, p.EmbedPatterns),
|
||||
IgnoredFiles: absJoin(p.Dir, p.IgnoredGoFiles, p.IgnoredOtherFiles),
|
||||
forTest: p.ForTest,
|
||||
depsErrors: p.DepsErrors,
|
||||
|
@ -805,17 +813,83 @@ func absJoin(dir string, fileses ...[]string) (res []string) {
|
|||
return res
|
||||
}
|
||||
|
||||
func golistargs(cfg *Config, words []string) []string {
|
||||
func jsonFlag(cfg *Config, goVersion int) string {
|
||||
if goVersion < 19 {
|
||||
return "-json"
|
||||
}
|
||||
var fields []string
|
||||
added := make(map[string]bool)
|
||||
addFields := func(fs ...string) {
|
||||
for _, f := range fs {
|
||||
if !added[f] {
|
||||
added[f] = true
|
||||
fields = append(fields, f)
|
||||
}
|
||||
}
|
||||
}
|
||||
addFields("Name", "ImportPath", "Error") // These fields are always needed
|
||||
if cfg.Mode&NeedFiles != 0 || cfg.Mode&NeedTypes != 0 {
|
||||
addFields("Dir", "GoFiles", "IgnoredGoFiles", "IgnoredOtherFiles", "CFiles",
|
||||
"CgoFiles", "CXXFiles", "MFiles", "HFiles", "FFiles", "SFiles",
|
||||
"SwigFiles", "SwigCXXFiles", "SysoFiles")
|
||||
if cfg.Tests {
|
||||
addFields("TestGoFiles", "XTestGoFiles")
|
||||
}
|
||||
}
|
||||
if cfg.Mode&NeedTypes != 0 {
|
||||
// CompiledGoFiles seems to be required for the test case TestCgoNoSyntax,
|
||||
// even when -compiled isn't passed in.
|
||||
// TODO(#52435): Should we make the test ask for -compiled, or automatically
|
||||
// request CompiledGoFiles in certain circumstances?
|
||||
addFields("Dir", "CompiledGoFiles")
|
||||
}
|
||||
if cfg.Mode&NeedCompiledGoFiles != 0 {
|
||||
addFields("Dir", "CompiledGoFiles", "Export")
|
||||
}
|
||||
if cfg.Mode&NeedImports != 0 {
|
||||
// When imports are requested, DepOnly is used to distinguish between packages
|
||||
// explicitly requested and transitive imports of those packages.
|
||||
addFields("DepOnly", "Imports", "ImportMap")
|
||||
if cfg.Tests {
|
||||
addFields("TestImports", "XTestImports")
|
||||
}
|
||||
}
|
||||
if cfg.Mode&NeedDeps != 0 {
|
||||
addFields("DepOnly")
|
||||
}
|
||||
if usesExportData(cfg) {
|
||||
// Request Dir in the unlikely case Export is not absolute.
|
||||
addFields("Dir", "Export")
|
||||
}
|
||||
if cfg.Mode&needInternalForTest != 0 {
|
||||
addFields("ForTest")
|
||||
}
|
||||
if cfg.Mode&needInternalDepsErrors != 0 {
|
||||
addFields("DepsErrors")
|
||||
}
|
||||
if cfg.Mode&NeedModule != 0 {
|
||||
addFields("Module")
|
||||
}
|
||||
if cfg.Mode&NeedEmbedFiles != 0 {
|
||||
addFields("EmbedFiles")
|
||||
}
|
||||
if cfg.Mode&NeedEmbedPatterns != 0 {
|
||||
addFields("EmbedPatterns")
|
||||
}
|
||||
return "-json=" + strings.Join(fields, ",")
|
||||
}
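// Illustrative sketch (not part of the vendored file; the function name is a
// placeholder): with go1.19+ the driver narrows `go list -json` to just the
// fields the LoadMode needs, while older go versions fall back to the full
// JSON output.
func exampleJSONFlag() {
	cfg := &Config{Mode: NeedName}
	println(jsonFlag(cfg, 19)) // -json=Name,ImportPath,Error
	println(jsonFlag(cfg, 18)) // -json
}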
|
||||
|
||||
func golistargs(cfg *Config, words []string, goVersion int) []string {
|
||||
const findFlags = NeedImports | NeedTypes | NeedSyntax | NeedTypesInfo
|
||||
fullargs := []string{
|
||||
"-e", "-json",
|
||||
"-e", jsonFlag(cfg, goVersion),
|
||||
fmt.Sprintf("-compiled=%t", cfg.Mode&(NeedCompiledGoFiles|NeedSyntax|NeedTypes|NeedTypesInfo|NeedTypesSizes) != 0),
|
||||
fmt.Sprintf("-test=%t", cfg.Tests),
|
||||
fmt.Sprintf("-export=%t", usesExportData(cfg)),
|
||||
fmt.Sprintf("-deps=%t", cfg.Mode&NeedImports != 0),
|
||||
// go list doesn't let you pass -test and -find together,
|
||||
// probably because you'd just get the TestMain.
|
||||
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0),
|
||||
fmt.Sprintf("-find=%t", !cfg.Tests && cfg.Mode&findFlags == 0 && !usesExportData(cfg)),
|
||||
}
|
||||
fullargs = append(fullargs, cfg.BuildFlags...)
|
||||
fullargs = append(fullargs, "--")
|
||||
|
@ -879,7 +953,7 @@ func (state *golistState) invokeGo(verb string, args ...string) (*bytes.Buffer,
|
|||
if !ok {
|
||||
// Catastrophic error:
|
||||
// - context cancellation
|
||||
return nil, xerrors.Errorf("couldn't run 'go': %w", err)
|
||||
return nil, fmt.Errorf("couldn't run 'go': %w", err)
|
||||
}
|
||||
|
||||
// Old go version?
|
||||
|
|
4
vendor/golang.org/x/tools/go/packages/loadmode_string.go
generated
vendored
@@ -15,7 +15,7 @@ var allModes = []LoadMode{
NeedCompiledGoFiles,
|
||||
NeedImports,
|
||||
NeedDeps,
|
||||
NeedExportsFile,
|
||||
NeedExportFile,
|
||||
NeedTypes,
|
||||
NeedSyntax,
|
||||
NeedTypesInfo,
|
||||
|
@ -28,7 +28,7 @@ var modeStrings = []string{
|
|||
"NeedCompiledGoFiles",
|
||||
"NeedImports",
|
||||
"NeedDeps",
|
||||
"NeedExportsFile",
|
||||
"NeedExportFile",
|
||||
"NeedTypes",
|
||||
"NeedSyntax",
|
||||
"NeedTypesInfo",
|
||||
|
|
73
vendor/golang.org/x/tools/go/packages/packages.go
generated
vendored
@@ -39,9 +39,6 @@ import (
// Load may return more information than requested.
|
||||
type LoadMode int
|
||||
|
||||
// TODO(matloob): When a V2 of go/packages is released, rename NeedExportsFile to
|
||||
// NeedExportFile to make it consistent with the Package field it's adding.
|
||||
|
||||
const (
|
||||
// NeedName adds Name and PkgPath.
|
||||
NeedName LoadMode = 1 << iota
|
||||
|
@ -59,8 +56,8 @@ const (
|
|||
// NeedDeps adds the fields requested by the LoadMode in the packages in Imports.
|
||||
NeedDeps
|
||||
|
||||
// NeedExportsFile adds ExportFile.
|
||||
NeedExportsFile
|
||||
// NeedExportFile adds ExportFile.
|
||||
NeedExportFile
|
||||
|
||||
// NeedTypes adds Types, Fset, and IllTyped.
|
||||
NeedTypes
|
||||
|
@ -74,12 +71,25 @@ const (
|
|||
// NeedTypesSizes adds TypesSizes.
|
||||
NeedTypesSizes
|
||||
|
||||
// needInternalDepsErrors adds the internal deps errors field for use by gopls.
|
||||
needInternalDepsErrors
|
||||
|
||||
// needInternalForTest adds the internal forTest field.
|
||||
// Tests must also be set on the context for this field to be populated.
|
||||
needInternalForTest
|
||||
|
||||
// typecheckCgo enables full support for type checking cgo. Requires Go 1.15+.
|
||||
// Modifies CompiledGoFiles and Types, and has no effect on its own.
|
||||
typecheckCgo
|
||||
|
||||
// NeedModule adds Module.
|
||||
NeedModule
|
||||
|
||||
// NeedEmbedFiles adds EmbedFiles.
|
||||
NeedEmbedFiles
|
||||
|
||||
// NeedEmbedPatterns adds EmbedPatterns.
|
||||
NeedEmbedPatterns
|
||||
)
|
||||
|
||||
const (
|
||||
|
@ -102,6 +112,9 @@ const (
|
|||
// Deprecated: LoadAllSyntax exists for historical compatibility
|
||||
// and should not be used. Please directly specify the needed fields using the Need values.
|
||||
LoadAllSyntax = LoadSyntax | NeedDeps
|
||||
|
||||
// Deprecated: NeedExportsFile is a historical misspelling of NeedExportFile.
|
||||
NeedExportsFile = NeedExportFile
|
||||
)
|
||||
|
||||
// A Config specifies details about how packages should be loaded.
|
||||
|
@ -296,6 +309,14 @@ type Package struct {
|
|||
// including assembly, C, C++, Fortran, Objective-C, SWIG, and so on.
|
||||
OtherFiles []string
|
||||
|
||||
// EmbedFiles lists the absolute file paths of the package's files
|
||||
// embedded with go:embed.
|
||||
EmbedFiles []string
|
||||
|
||||
// EmbedPatterns lists the absolute file patterns of the package's
|
||||
// files embedded with go:embed.
|
||||
EmbedPatterns []string
|
||||
|
||||
// IgnoredFiles lists source files that are not part of the package
|
||||
// using the current build configuration but that might be part of
|
||||
// the package using other build configurations.
|
||||
|
@ -389,6 +410,8 @@ func init() {
|
|||
config.(*Config).modFlag = value
|
||||
}
|
||||
packagesinternal.TypecheckCgo = int(typecheckCgo)
|
||||
packagesinternal.DepsErrors = int(needInternalDepsErrors)
|
||||
packagesinternal.ForTest = int(needInternalForTest)
|
||||
}
|
||||
|
||||
// An Error describes a problem with a package's metadata, syntax, or types.
|
||||
|
@ -431,6 +454,8 @@ type flatPackage struct {
|
|||
GoFiles []string `json:",omitempty"`
|
||||
CompiledGoFiles []string `json:",omitempty"`
|
||||
OtherFiles []string `json:",omitempty"`
|
||||
EmbedFiles []string `json:",omitempty"`
|
||||
EmbedPatterns []string `json:",omitempty"`
|
||||
IgnoredFiles []string `json:",omitempty"`
|
||||
ExportFile string `json:",omitempty"`
|
||||
Imports map[string]string `json:",omitempty"`
|
||||
|
@ -454,6 +479,8 @@ func (p *Package) MarshalJSON() ([]byte, error) {
|
|||
GoFiles: p.GoFiles,
|
||||
CompiledGoFiles: p.CompiledGoFiles,
|
||||
OtherFiles: p.OtherFiles,
|
||||
EmbedFiles: p.EmbedFiles,
|
||||
EmbedPatterns: p.EmbedPatterns,
|
||||
IgnoredFiles: p.IgnoredFiles,
|
||||
ExportFile: p.ExportFile,
|
||||
}
|
||||
|
@ -481,6 +508,8 @@ func (p *Package) UnmarshalJSON(b []byte) error {
|
|||
GoFiles: flat.GoFiles,
|
||||
CompiledGoFiles: flat.CompiledGoFiles,
|
||||
OtherFiles: flat.OtherFiles,
|
||||
EmbedFiles: flat.EmbedFiles,
|
||||
EmbedPatterns: flat.EmbedPatterns,
|
||||
ExportFile: flat.ExportFile,
|
||||
}
|
||||
if len(flat.Imports) > 0 {
|
||||
|
@ -614,7 +643,7 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
|
|||
needsrc := ((ld.Mode&(NeedSyntax|NeedTypesInfo) != 0 && (rootIndex >= 0 || ld.Mode&NeedDeps != 0)) ||
|
||||
// ... or if we need types and the exportData is invalid. We fall back to (incompletely)
|
||||
// typechecking packages from source if they fail to compile.
|
||||
(ld.Mode&NeedTypes|NeedTypesInfo != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
|
||||
(ld.Mode&(NeedTypes|NeedTypesInfo) != 0 && exportDataInvalid)) && pkg.PkgPath != "unsafe"
|
||||
lpkg := &loaderPackage{
|
||||
Package: pkg,
|
||||
needtypes: needtypes,
|
||||
|
@ -752,13 +781,19 @@ func (ld *loader) refine(roots []string, list ...*Package) ([]*Package, error) {
|
|||
ld.pkgs[i].OtherFiles = nil
|
||||
ld.pkgs[i].IgnoredFiles = nil
|
||||
}
|
||||
if ld.requestedMode&NeedEmbedFiles == 0 {
|
||||
ld.pkgs[i].EmbedFiles = nil
|
||||
}
|
||||
if ld.requestedMode&NeedEmbedPatterns == 0 {
|
||||
ld.pkgs[i].EmbedPatterns = nil
|
||||
}
|
||||
if ld.requestedMode&NeedCompiledGoFiles == 0 {
|
||||
ld.pkgs[i].CompiledGoFiles = nil
|
||||
}
|
||||
if ld.requestedMode&NeedImports == 0 {
|
||||
ld.pkgs[i].Imports = nil
|
||||
}
|
||||
if ld.requestedMode&NeedExportsFile == 0 {
|
||||
if ld.requestedMode&NeedExportFile == 0 {
|
||||
ld.pkgs[i].ExportFile = ""
|
||||
}
|
||||
if ld.requestedMode&NeedTypes == 0 {
|
||||
|
@ -1053,7 +1088,6 @@ func (ld *loader) parseFile(filename string) (*ast.File, error) {
|
|||
//
|
||||
// Because files are scanned in parallel, the token.Pos
|
||||
// positions of the resulting ast.Files are not ordered.
|
||||
//
|
||||
func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
|
||||
var wg sync.WaitGroup
|
||||
n := len(filenames)
|
||||
|
@ -1097,7 +1131,6 @@ func (ld *loader) parseFiles(filenames []string) ([]*ast.File, []error) {
|
|||
|
||||
// sameFile returns true if x and y have the same basename and denote
|
||||
// the same file.
|
||||
//
|
||||
func sameFile(x, y string) bool {
|
||||
if x == y {
|
||||
// It could be the case that y doesn't exist.
|
||||
|
@ -1210,8 +1243,13 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("reading %s: %v", lpkg.ExportFile, err)
|
||||
}
|
||||
if _, ok := view["go.shape"]; ok {
|
||||
// Account for the pseudopackage "go.shape" that gets
|
||||
// created by generic code.
|
||||
viewLen++
|
||||
}
|
||||
if viewLen != len(view) {
|
||||
log.Fatalf("Unexpected package creation during export data loading")
|
||||
log.Panicf("golang.org/x/tools/go/packages: unexpected new packages during load of %s", lpkg.PkgPath)
|
||||
}
|
||||
|
||||
lpkg.Types = tpkg
|
||||
|
@ -1222,17 +1260,8 @@ func (ld *loader) loadFromExportData(lpkg *loaderPackage) (*types.Package, error
|
|||
|
||||
// impliedLoadMode returns loadMode with its dependencies.
|
||||
func impliedLoadMode(loadMode LoadMode) LoadMode {
|
||||
if loadMode&NeedTypesInfo != 0 && loadMode&NeedImports == 0 {
|
||||
// If NeedTypesInfo, go/packages needs to do typechecking itself so it can
|
||||
// associate type info with the AST. To do so, we need the export data
|
||||
// for dependencies, which means we need to ask for the direct dependencies.
|
||||
// NeedImports is used to ask for the direct dependencies.
|
||||
loadMode |= NeedImports
|
||||
}
|
||||
|
||||
if loadMode&NeedDeps != 0 && loadMode&NeedImports == 0 {
|
||||
// With NeedDeps we need to load at least direct dependencies.
|
||||
// NeedImports is used to ask for the direct dependencies.
|
||||
if loadMode&(NeedDeps|NeedTypes|NeedTypesInfo) != 0 {
|
||||
// All these things require knowing the import graph.
|
||||
loadMode |= NeedImports
|
||||
}
|
||||
|
||||
|
@ -1240,5 +1269,5 @@ func impliedLoadMode(loadMode LoadMode) LoadMode {
|
|||
}
|
||||
|
||||
func usesExportData(cfg *Config) bool {
|
||||
return cfg.Mode&NeedExportsFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
|
||||
return cfg.Mode&NeedExportFile != 0 || cfg.Mode&NeedTypes != 0 && cfg.Mode&NeedDeps == 0
|
||||
}
|
||||
|
|
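Since this bump renames NeedExportsFile to NeedExportFile and keeps the old spelling only as a deprecated alias, existing callers keep compiling unchanged. A minimal sketch, not part of the diff, using only the public go/packages identifiers:

package main

import "golang.org/x/tools/go/packages"

func main() {
	// Old spelling still works through the deprecated alias...
	oldMode := packages.NeedExportsFile | packages.NeedTypes
	// ...but new code should use the corrected name.
	newMode := packages.NeedExportFile | packages.NeedTypes
	_ = oldMode == newMode // true: the alias carries the same bit value
}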
113
vendor/golang.org/x/tools/go/ssa/block.go
generated
vendored
Normal file

@@ -0,0 +1,113 @@
// Copyright 2022 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package ssa

import "fmt"

// This file implements the BasicBlock type.

// addEdge adds a control-flow graph edge from from to to.
func addEdge(from, to *BasicBlock) {
from.Succs = append(from.Succs, to)
to.Preds = append(to.Preds, from)
}

// Parent returns the function that contains block b.
func (b *BasicBlock) Parent() *Function { return b.parent }

// String returns a human-readable label of this block.
// It is not guaranteed unique within the function.
func (b *BasicBlock) String() string {
return fmt.Sprintf("%d", b.Index)
}

// emit appends an instruction to the current basic block.
// If the instruction defines a Value, it is returned.
func (b *BasicBlock) emit(i Instruction) Value {
i.setBlock(b)
b.Instrs = append(b.Instrs, i)
v, _ := i.(Value)
return v
}

// predIndex returns the i such that b.Preds[i] == c or panics if
// there is none.
func (b *BasicBlock) predIndex(c *BasicBlock) int {
for i, pred := range b.Preds {
if pred == c {
return i
}
}
panic(fmt.Sprintf("no edge %s -> %s", c, b))
}

// hasPhi returns true if b.Instrs contains φ-nodes.
func (b *BasicBlock) hasPhi() bool {
_, ok := b.Instrs[0].(*Phi)
return ok
}

// phis returns the prefix of b.Instrs containing all the block's φ-nodes.
func (b *BasicBlock) phis() []Instruction {
for i, instr := range b.Instrs {
if _, ok := instr.(*Phi); !ok {
return b.Instrs[:i]
}
}
return nil // unreachable in well-formed blocks
}

// replacePred replaces all occurrences of p in b's predecessor list with q.
// Ordinarily there should be at most one.
func (b *BasicBlock) replacePred(p, q *BasicBlock) {
for i, pred := range b.Preds {
if pred == p {
b.Preds[i] = q
}
}
}

// replaceSucc replaces all occurrences of p in b's successor list with q.
// Ordinarily there should be at most one.
func (b *BasicBlock) replaceSucc(p, q *BasicBlock) {
for i, succ := range b.Succs {
if succ == p {
b.Succs[i] = q
}
}
}

// removePred removes all occurrences of p in b's
// predecessor list and φ-nodes.
// Ordinarily there should be at most one.
func (b *BasicBlock) removePred(p *BasicBlock) {
phis := b.phis()

// We must preserve edge order for φ-nodes.
j := 0
for i, pred := range b.Preds {
if pred != p {
b.Preds[j] = b.Preds[i]
// Strike out φ-edge too.
for _, instr := range phis {
phi := instr.(*Phi)
phi.Edges[j] = phi.Edges[i]
}
j++
}
}
// Nil out b.Preds[j:] and φ-edges[j:] to aid GC.
for i := j; i < len(b.Preds); i++ {
b.Preds[i] = nil
for _, instr := range phis {
instr.(*Phi).Edges[i] = nil
}
}
b.Preds = b.Preds[:j]
for _, instr := range phis {
phi := instr.(*Phi)
phi.Edges = phi.Edges[:j]
}
}
4
vendor/golang.org/x/tools/go/ssa/blockopt.go
generated
vendored

@@ -31,7 +31,6 @@ func markReachable(b *BasicBlock) {

// deleteUnreachableBlocks marks all reachable blocks of f and
// eliminates (nils) all others, including possibly cyclic subgraphs.
//
func deleteUnreachableBlocks(f *Function) {
const white, black = 0, -1
// We borrow b.Index temporarily as the mark bit.

@@ -61,7 +60,6 @@ func deleteUnreachableBlocks(f *Function) {
// jumpThreading attempts to apply simple jump-threading to block b,
// in which a->b->c become a->c if b is just a Jump.
// The result is true if the optimization was applied.
//
func jumpThreading(f *Function, b *BasicBlock) bool {
if b.Index == 0 {
return false // don't apply to entry block

@@ -108,7 +106,6 @@ func jumpThreading(f *Function, b *BasicBlock) bool {
// fuseBlocks attempts to apply the block fusion optimization to block
// a, in which a->b becomes ab if len(a.Succs)==len(b.Preds)==1.
// The result is true if the optimization was applied.
//
func fuseBlocks(f *Function, a *BasicBlock) bool {
if len(a.Succs) != 1 {
return false

@@ -150,7 +147,6 @@ func fuseBlocks(f *Function, a *BasicBlock) bool {
// optimizeBlocks() performs some simple block optimizations on a
// completed function: dead block elimination, block fusion, jump
// threading.
//
func optimizeBlocks(f *Function) {
deleteUnreachableBlocks(f)
489
vendor/golang.org/x/tools/go/ssa/builder.go
generated
vendored
File diff suppressed because it is too large
7
vendor/golang.org/x/tools/go/ssa/const.go
generated
vendored

@@ -16,7 +16,6 @@ import (

// NewConst returns a new constant of the specified value and type.
// val must be valid according to the specification of Const.Value.
//
func NewConst(val constant.Value, typ types.Type) *Const {
return &Const{typ, val}
}

@@ -29,7 +28,6 @@ func intConst(i int64) *Const {

// nilConst returns a nil constant of the specified type, which may
// be any reference type, including interfaces.
//
func nilConst(typ types.Type) *Const {
return NewConst(nil, typ)
}

@@ -42,7 +40,6 @@ func stringConst(s string) *Const {
// zeroConst returns a new "zero" constant of the specified type,
// which must not be an array or struct type: the zero values of
// aggregates are well-defined but cannot be represented by Const.
//
func zeroConst(t types.Type) *Const {
switch t := t.(type) {
case *types.Basic:

@@ -119,7 +116,6 @@ func (c *Const) IsNil() bool {

// Int64 returns the numeric value of this constant truncated to fit
// a signed 64-bit integer.
//
func (c *Const) Int64() int64 {
switch x := constant.ToInt(c.Value); x.Kind() {
case constant.Int:

@@ -136,7 +132,6 @@ func (c *Const) Int64() int64 {

// Uint64 returns the numeric value of this constant truncated to fit
// an unsigned 64-bit integer.
//
func (c *Const) Uint64() uint64 {
switch x := constant.ToInt(c.Value); x.Kind() {
case constant.Int:

@@ -153,7 +148,6 @@ func (c *Const) Uint64() uint64 {

// Float64 returns the numeric value of this constant truncated to fit
// a float64.
//
func (c *Const) Float64() float64 {
f, _ := constant.Float64Val(c.Value)
return f

@@ -161,7 +155,6 @@ func (c *Const) Float64() float64 {

// Complex128 returns the complex value of this constant truncated to
// fit a complex128.
//
func (c *Const) Complex128() complex128 {
re, _ := constant.Float64Val(constant.Real(c.Value))
im, _ := constant.Float64Val(constant.Imag(c.Value))
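For context, NewConst and the accessor methods touched above are part of the exported go/ssa API; a minimal sketch of how they fit together, not part of this diff:

package main

import (
	"fmt"
	"go/constant"
	"go/types"

	"golang.org/x/tools/go/ssa"
)

func main() {
	// Wrap a go/constant value in an ssa.Const and read it back through
	// the truncating accessors documented in the diff above.
	c := ssa.NewConst(constant.MakeInt64(42), types.Typ[types.Int64])
	fmt.Println(c.Int64(), c.Float64()) // 42 42
}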
85
vendor/golang.org/x/tools/go/ssa/create.go
generated
vendored

@@ -16,25 +16,29 @@ import (
"sync"

"golang.org/x/tools/go/types/typeutil"
"golang.org/x/tools/internal/typeparams"
)

// NewProgram returns a new SSA Program.
//
// mode controls diagnostics and checking during SSA construction.
//
func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
prog := &Program{
Fset: fset,
imported: make(map[string]*Package),
packages: make(map[*types.Package]*Package),
thunks: make(map[selectionKey]*Function),
bounds: make(map[*types.Func]*Function),
mode: mode,
Fset: fset,
imported: make(map[string]*Package),
packages: make(map[*types.Package]*Package),
thunks: make(map[selectionKey]*Function),
bounds: make(map[boundsKey]*Function),
mode: mode,
canon: newCanonizer(),
ctxt: typeparams.NewContext(),
instances: make(map[*Function]*instanceSet),
parameterized: tpWalker{seen: make(map[types.Type]bool)},
}

h := typeutil.MakeHasher() // protected by methodsMu, in effect
prog.methodSets.SetHasher(h)
prog.canon.SetHasher(h)
prog.runtimeTypes.SetHasher(h)

return prog
}

@@ -45,7 +49,6 @@ func NewProgram(fset *token.FileSet, mode BuilderMode) *Program {
// For objects from Go source code, syntax is the associated syntax
// tree (for funcs and vars only); it will be used during the build
// phase.
//
func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
name := obj.Name()
switch obj := obj.(type) {

@@ -66,7 +69,7 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
Value: NewConst(obj.Val(), obj.Type()),
pkg: pkg,
}
pkg.values[obj] = c.Value
pkg.objects[obj] = c
pkg.Members[name] = c

case *types.Var:

@@ -77,7 +80,7 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
typ: types.NewPointer(obj.Type()), // address
pos: obj.Pos(),
}
pkg.values[obj] = g
pkg.objects[obj] = g
pkg.Members[name] = g

case *types.Func:

@@ -86,20 +89,41 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
pkg.ninit++
name = fmt.Sprintf("init#%d", pkg.ninit)
}
fn := &Function{
name: name,
object: obj,
Signature: sig,
syntax: syntax,
pos: obj.Pos(),
Pkg: pkg,
Prog: pkg.Prog,

// Collect type parameters if this is a generic function/method.
var tparams []*typeparams.TypeParam
for i, rtparams := 0, typeparams.RecvTypeParams(sig); i < rtparams.Len(); i++ {
tparams = append(tparams, rtparams.At(i))
}
for i, sigparams := 0, typeparams.ForSignature(sig); i < sigparams.Len(); i++ {
tparams = append(tparams, sigparams.At(i))
}

fn := &Function{
name: name,
object: obj,
Signature: sig,
syntax: syntax,
pos: obj.Pos(),
Pkg: pkg,
Prog: pkg.Prog,
_TypeParams: tparams,
info: pkg.info,
}
pkg.created.Add(fn)
if syntax == nil {
fn.Synthetic = "loaded from gc object file"
}
if len(tparams) > 0 {
fn.Prog.createInstanceSet(fn)
}
if len(tparams) > 0 && syntax != nil {
fn.Synthetic = "generic function"
// TODO(taking): Allow for the function to be built once type params are supported.
fn.syntax = nil // Treating as an external function temporarily.
}

pkg.values[obj] = fn
pkg.objects[obj] = fn
if sig.Recv() == nil {
pkg.Members[name] = fn // package-level function
}

@@ -112,7 +136,6 @@ func memberFromObject(pkg *Package, obj types.Object, syntax ast.Node) {
// membersFromDecl populates package pkg with members for each
// typechecker object (var, func, const or type) associated with the
// specified decl.
//
func membersFromDecl(pkg *Package, decl ast.Decl) {
switch decl := decl.(type) {
case *ast.GenDecl: // import, const, type or var

@@ -152,6 +175,19 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
}
}

// creator tracks functions that have finished their CREATE phases.
//
// All Functions belong to the same Program. May have differing packages.
//
// creators are not thread-safe.
type creator []*Function

func (c *creator) Add(fn *Function) {
*c = append(*c, fn)
}
func (c *creator) At(i int) *Function { return (*c)[i] }
func (c *creator) Len() int { return len(*c) }

// CreatePackage constructs and returns an SSA Package from the
// specified type-checked, error-free file ASTs, and populates its
// Members mapping.

@@ -161,12 +197,11 @@ func membersFromDecl(pkg *Package, decl ast.Decl) {
//
// The real work of building SSA form for each function is not done
// until a subsequent call to Package.Build().
//
func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *types.Info, importable bool) *Package {
p := &Package{
Prog: prog,
Members: make(map[string]Member),
values: make(map[types.Object]Value),
objects: make(map[types.Object]Member),
Pkg: pkg,
info: info, // transient (CREATE and BUILD phases)
files: files, // transient (CREATE and BUILD phases)

@@ -179,8 +214,10 @@ func (prog *Program) CreatePackage(pkg *types.Package, files []*ast.File, info *
Synthetic: "package initializer",
Pkg: p,
Prog: prog,
info: p.info,
}
p.Members[p.init.name] = p.init
p.created.Add(p.init)

// CREATE phase.
// Allocate all package members: vars, funcs, consts and types.

@@ -242,7 +279,6 @@ var printMu sync.Mutex

// AllPackages returns a new slice containing all packages in the
// program prog in unspecified order.
//
func (prog *Program) AllPackages() []*Package {
pkgs := make([]*Package, 0, len(prog.packages))
for _, pkg := range prog.packages {

@@ -264,7 +300,6 @@ func (prog *Program) AllPackages() []*Package {
// false---yet this function remains very convenient.
// Clients should use (*Program).Package instead where possible.
// SSA doesn't really need a string-keyed map of packages.
//
func (prog *Program) ImportedPackage(path string) *Package {
return prog.imported[path]
}
99
vendor/golang.org/x/tools/go/ssa/doc.go
generated
vendored

@@ -41,60 +41,60 @@
//
// The primary interfaces of this package are:
//
// - Member: a named member of a Go package.
// - Value: an expression that yields a value.
// - Instruction: a statement that consumes values and performs computation.
// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
// - Member: a named member of a Go package.
// - Value: an expression that yields a value.
// - Instruction: a statement that consumes values and performs computation.
// - Node: a Value or Instruction (emphasizing its membership in the SSA value graph)
//
// A computation that yields a result implements both the Value and
// Instruction interfaces. The following table shows for each
// concrete type which of these interfaces it implements.
//
// Value? Instruction? Member?
// *Alloc ✔ ✔
// *BinOp ✔ ✔
// *Builtin ✔
// *Call ✔ ✔
// *ChangeInterface ✔ ✔
// *ChangeType ✔ ✔
// *Const ✔
// *Convert ✔ ✔
// *DebugRef ✔
// *Defer ✔
// *Extract ✔ ✔
// *Field ✔ ✔
// *FieldAddr ✔ ✔
// *FreeVar ✔
// *Function ✔ ✔ (func)
// *Global ✔ ✔ (var)
// *Go ✔
// *If ✔
// *Index ✔ ✔
// *IndexAddr ✔ ✔
// *Jump ✔
// *Lookup ✔ ✔
// *MakeChan ✔ ✔
// *MakeClosure ✔ ✔
// *MakeInterface ✔ ✔
// *MakeMap ✔ ✔
// *MakeSlice ✔ ✔
// *MapUpdate ✔
// *NamedConst ✔ (const)
// *Next ✔ ✔
// *Panic ✔
// *Parameter ✔
// *Phi ✔ ✔
// *Range ✔ ✔
// *Return ✔
// *RunDefers ✔
// *Select ✔ ✔
// *Send ✔
// *Slice ✔ ✔
// *SliceToArrayPointer ✔ ✔
// *Store ✔
// *Type ✔ (type)
// *TypeAssert ✔ ✔
// *UnOp ✔ ✔
// Value? Instruction? Member?
// *Alloc ✔ ✔
// *BinOp ✔ ✔
// *Builtin ✔
// *Call ✔ ✔
// *ChangeInterface ✔ ✔
// *ChangeType ✔ ✔
// *Const ✔
// *Convert ✔ ✔
// *DebugRef ✔
// *Defer ✔
// *Extract ✔ ✔
// *Field ✔ ✔
// *FieldAddr ✔ ✔
// *FreeVar ✔
// *Function ✔ ✔ (func)
// *Global ✔ ✔ (var)
// *Go ✔
// *If ✔
// *Index ✔ ✔
// *IndexAddr ✔ ✔
// *Jump ✔
// *Lookup ✔ ✔
// *MakeChan ✔ ✔
// *MakeClosure ✔ ✔
// *MakeInterface ✔ ✔
// *MakeMap ✔ ✔
// *MakeSlice ✔ ✔
// *MapUpdate ✔
// *NamedConst ✔ (const)
// *Next ✔ ✔
// *Panic ✔
// *Parameter ✔
// *Phi ✔ ✔
// *Range ✔ ✔
// *Return ✔
// *RunDefers ✔
// *Select ✔ ✔
// *Send ✔
// *Slice ✔ ✔
// *SliceToArrayPointer ✔ ✔
// *Store ✔
// *Type ✔ (type)
// *TypeAssert ✔ ✔
// *UnOp ✔ ✔
//
// Other key types in this package include: Program, Package, Function
// and BasicBlock.

@@ -122,5 +122,4 @@
// of trying to determine corresponding elements across the four
// domains of source locations, ast.Nodes, types.Objects,
// ssa.Values/Instructions.
//
package ssa // import "golang.org/x/tools/go/ssa"
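The doc.go overview above refers to Program, Package, Function and BasicBlock; for context only, a minimal sketch of the usual entry point for building them from loaded packages. It assumes the golang.org/x/tools/go/ssa/ssautil helper package, which is not part of this diff:

package main

import (
	"fmt"
	"log"

	"golang.org/x/tools/go/packages"
	"golang.org/x/tools/go/ssa"
	"golang.org/x/tools/go/ssa/ssautil"
)

func main() {
	cfg := &packages.Config{Mode: packages.LoadAllSyntax}
	initial, err := packages.Load(cfg, "fmt")
	if err != nil {
		log.Fatal(err)
	}
	// Create SSA packages for the loaded packages, then build their bodies.
	prog, pkgs := ssautil.Packages(initial, ssa.SanityCheckFunctions)
	prog.Build()
	for _, p := range pkgs {
		if p != nil { // entries may be nil for packages with load errors
			fmt.Println(p.Pkg.Path(), len(p.Members))
		}
	}
}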
6
vendor/golang.org/x/tools/go/ssa/dom.go
generated
vendored

@@ -29,12 +29,10 @@ import (
// its parent in the dominator tree, if any.
// Neither the entry node (b.Index==0) nor recover node
// (b==b.Parent().Recover()) have a parent.
//
func (b *BasicBlock) Idom() *BasicBlock { return b.dom.idom }

// Dominees returns the list of blocks that b immediately dominates:
// its children in the dominator tree.
//
func (b *BasicBlock) Dominees() []*BasicBlock { return b.dom.children }

// Dominates reports whether b dominates c.

@@ -50,7 +48,6 @@ func (a byDomPreorder) Less(i, j int) bool { return a[i].dom.pre < a[j].dom.pre

// DomPreorder returns a new slice containing the blocks of f in
// dominator tree preorder.
//
func (f *Function) DomPreorder() []*BasicBlock {
n := len(f.Blocks)
order := make(byDomPreorder, n)

@@ -110,7 +107,6 @@ func (lt *ltState) link(v, w *BasicBlock) {

// buildDomTree computes the dominator tree of f using the LT algorithm.
// Precondition: all blocks are reachable (e.g. optimizeBlocks has been run).
//
func buildDomTree(f *Function) {
// The step numbers refer to the original LT paper; the
// reordering is due to Georgiadis.

@@ -210,7 +206,6 @@ func buildDomTree(f *Function) {
// numberDomTree sets the pre- and post-order numbers of a depth-first
// traversal of the dominator tree rooted at v. These are used to
// answer dominance queries in constant time.
//
func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
v.dom.pre = pre
pre++

@@ -228,7 +223,6 @@ func numberDomTree(v *BasicBlock, pre, post int32) (int32, int32) {
// computed by the LT algorithm by comparing against the dominance
// relation computed by a naive Kildall-style forward dataflow
// analysis (Algorithm 10.16 from the "Dragon" book).
//
func sanityCheckDomTree(f *Function) {
n := len(f.Blocks)
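The dominator-tree helpers above (Idom, Dominees, DomPreorder) are exported; a small illustrative sketch, not part of the diff, assuming fn is an *ssa.Function that has already been built:

package ssadump

import (
	"fmt"

	"golang.org/x/tools/go/ssa"
)

// dumpDomTree walks fn's blocks in dominator-tree preorder and prints each
// block's immediate dominator (nil for the entry and recover blocks).
func dumpDomTree(fn *ssa.Function) {
	for _, b := range fn.DomPreorder() {
		fmt.Printf("block %v: idom=%v dominees=%d\n", b, b.Idom(), len(b.Dominees()))
	}
}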
Some files were not shown because too many files have changed in this diff