diff --git a/agent/runner.go b/agent/runner.go index 76e9f3b55..518843bff 100644 --- a/agent/runner.go +++ b/agent/runner.go @@ -23,15 +23,14 @@ import ( "sync" "time" + "github.com/rs/zerolog/log" + "github.com/tevino/abool" "google.golang.org/grpc/metadata" "github.com/woodpecker-ci/woodpecker/pipeline" "github.com/woodpecker-ci/woodpecker/pipeline/backend" "github.com/woodpecker-ci/woodpecker/pipeline/multipart" "github.com/woodpecker-ci/woodpecker/pipeline/rpc" - - "github.com/rs/zerolog/log" - "github.com/tevino/abool" ) // TODO: Implement log streaming. diff --git a/cli/build/build_approve.go b/cli/build/build_approve.go index 505f10cf7..2f10702e0 100644 --- a/cli/build/build_approve.go +++ b/cli/build/build_approve.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_decline.go b/cli/build/build_decline.go index 9ff0be443..525a398a0 100644 --- a/cli/build/build_decline.go +++ b/cli/build/build_decline.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_info.go b/cli/build/build_info.go index 640af5362..150eed371 100644 --- a/cli/build/build_info.go +++ b/cli/build/build_info.go @@ -6,6 +6,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_kill.go b/cli/build/build_kill.go index 8902c319d..01b66c1a4 100644 --- a/cli/build/build_kill.go +++ b/cli/build/build_kill.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_last.go b/cli/build/build_last.go index 1ef038658..c7918bef5 100644 --- a/cli/build/build_last.go +++ b/cli/build/build_last.go @@ -5,6 +5,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_list.go b/cli/build/build_list.go index 65c96c3c9..eeb9b6dab 100644 --- a/cli/build/build_list.go +++ b/cli/build/build_list.go @@ -5,6 +5,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_ps.go b/cli/build/build_ps.go index 6b4f37324..533c0490f 100644 --- a/cli/build/build_ps.go +++ b/cli/build/build_ps.go @@ -6,6 +6,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_queue.go b/cli/build/build_queue.go index 58372d4f1..663467a71 100644 --- a/cli/build/build_queue.go +++ b/cli/build/build_queue.go @@ -6,6 +6,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_start.go b/cli/build/build_start.go index bcd299de0..c90440a7d 100644 --- a/cli/build/build_start.go +++ b/cli/build/build_start.go @@ -6,6 +6,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/build/build_stop.go b/cli/build/build_stop.go index 419758eda..bdd25762c 100644 --- a/cli/build/build_stop.go +++ b/cli/build/build_stop.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/deploy/deploy.go b/cli/deploy/deploy.go index 1f4d6c89f..757a32505 100644 --- a/cli/deploy/deploy.go +++ b/cli/deploy/deploy.go @@ -6,10 +6,10 @@ import ( "os" "strconv" + "github.com/urfave/cli" + 
"github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) // Command exports the deploy command. diff --git a/cli/exec/exec.go b/cli/exec/exec.go index 2fade5a46..105a87de2 100644 --- a/cli/exec/exec.go +++ b/cli/exec/exec.go @@ -5,13 +5,15 @@ import ( "fmt" "io" "io/ioutil" - "log" "path" "path/filepath" "runtime" "strings" "github.com/drone/envsubst" + "github.com/rs/zerolog/log" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/pipeline" "github.com/woodpecker-ci/woodpecker/pipeline/backend" "github.com/woodpecker-ci/woodpecker/pipeline/backend/docker" @@ -22,8 +24,6 @@ import ( "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml/matrix" "github.com/woodpecker-ci/woodpecker/pipeline/interrupt" "github.com/woodpecker-ci/woodpecker/pipeline/multipart" - - "github.com/urfave/cli" ) // Command exports the exec command. @@ -33,7 +33,7 @@ var Command = cli.Command{ ArgsUsage: "[path/to/.woodpecker.yml]", Action: func(c *cli.Context) { if err := exec(c); err != nil { - log.Fatalln(err) + log.Fatal().Err(err).Msg("") } }, Flags: flags, diff --git a/cli/lint/lint.go b/cli/lint/lint.go index 04668ff1d..ff4b418f2 100644 --- a/cli/lint/lint.go +++ b/cli/lint/lint.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/pipeline/schema" ) diff --git a/cli/log/log_purge.go b/cli/log/log_purge.go index c4802a451..ab9c2594f 100644 --- a/cli/log/log_purge.go +++ b/cli/log/log_purge.go @@ -5,6 +5,7 @@ import ( "strconv" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/registry/registry_add.go b/cli/registry/registry_add.go index 306cc34dd..4e20106b2 100644 --- a/cli/registry/registry_add.go +++ b/cli/registry/registry_add.go @@ -4,10 +4,10 @@ import ( "io/ioutil" "strings" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) var registryCreateCmd = cli.Command{ diff --git a/cli/registry/registry_info.go b/cli/registry/registry_info.go index bcf71a7a7..e07a4e923 100644 --- a/cli/registry/registry_info.go +++ b/cli/registry/registry_info.go @@ -4,9 +4,9 @@ import ( "html/template" "os" - "github.com/woodpecker-ci/woodpecker/cli/internal" - "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/cli/internal" ) var registryInfoCmd = cli.Command{ diff --git a/cli/registry/registry_rm.go b/cli/registry/registry_rm.go index e9d9fb1c8..7e9f98fea 100644 --- a/cli/registry/registry_rm.go +++ b/cli/registry/registry_rm.go @@ -1,9 +1,9 @@ package registry import ( - "github.com/woodpecker-ci/woodpecker/cli/internal" - "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/cli/internal" ) var registryDeleteCmd = cli.Command{ diff --git a/cli/registry/registry_set.go b/cli/registry/registry_set.go index a0ec8e918..962bb965a 100644 --- a/cli/registry/registry_set.go +++ b/cli/registry/registry_set.go @@ -4,10 +4,10 @@ import ( "io/ioutil" "strings" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) var registryUpdateCmd = cli.Command{ diff --git a/cli/repo/repo_add.go b/cli/repo/repo_add.go index 39ed116e6..a2e0fcfe4 100644 --- a/cli/repo/repo_add.go +++ b/cli/repo/repo_add.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/urfave/cli" + 
"github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_chown.go b/cli/repo/repo_chown.go index 45ce0ddcf..0845f1d57 100644 --- a/cli/repo/repo_chown.go +++ b/cli/repo/repo_chown.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_info.go b/cli/repo/repo_info.go index 971d834d5..4dc7632e6 100644 --- a/cli/repo/repo_info.go +++ b/cli/repo/repo_info.go @@ -5,6 +5,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_list.go b/cli/repo/repo_list.go index b60a3f1e4..6d2f7b207 100644 --- a/cli/repo/repo_list.go +++ b/cli/repo/repo_list.go @@ -5,6 +5,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_repair.go b/cli/repo/repo_repair.go index 6b06d483d..25bc16961 100644 --- a/cli/repo/repo_repair.go +++ b/cli/repo/repo_repair.go @@ -2,6 +2,7 @@ package repo import ( "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_rm.go b/cli/repo/repo_rm.go index 2e2e36f35..e434e21e4 100644 --- a/cli/repo/repo_rm.go +++ b/cli/repo/repo_rm.go @@ -3,9 +3,9 @@ package repo import ( "fmt" - "github.com/woodpecker-ci/woodpecker/cli/internal" - "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/cli/internal" ) var repoRemoveCmd = cli.Command{ diff --git a/cli/repo/repo_sync.go b/cli/repo/repo_sync.go index 2a6feab92..f152c2b71 100644 --- a/cli/repo/repo_sync.go +++ b/cli/repo/repo_sync.go @@ -5,6 +5,7 @@ import ( "text/template" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" ) diff --git a/cli/repo/repo_update.go b/cli/repo/repo_update.go index fb7b647a2..852575cef 100644 --- a/cli/repo/repo_update.go +++ b/cli/repo/repo_update.go @@ -4,10 +4,10 @@ import ( "fmt" "time" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) var repoUpdateCmd = cli.Command{ diff --git a/cli/secret/secret_add.go b/cli/secret/secret_add.go index 9412e4970..73945a7d0 100644 --- a/cli/secret/secret_add.go +++ b/cli/secret/secret_add.go @@ -4,10 +4,10 @@ import ( "io/ioutil" "strings" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) var secretCreateCmd = cli.Command{ diff --git a/cli/secret/secret_set.go b/cli/secret/secret_set.go index 360b2174c..29ea9d48f 100644 --- a/cli/secret/secret_set.go +++ b/cli/secret/secret_set.go @@ -4,10 +4,10 @@ import ( "io/ioutil" "strings" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/internal" "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" - - "github.com/urfave/cli" ) var secretUpdateCmd = cli.Command{ diff --git a/cli/user/user_add.go b/cli/user/user_add.go index 83f3bd289..3978d5944 100644 --- a/cli/user/user_add.go +++ b/cli/user/user_add.go @@ -4,9 +4,9 @@ import ( "fmt" "github.com/urfave/cli" - "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" "github.com/woodpecker-ci/woodpecker/cli/internal" + "github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker" ) var userAddCmd = cli.Command{ diff --git a/cmd/agent/agent.go b/cmd/agent/agent.go index c76829552..507077953 100644 --- a/cmd/agent/agent.go +++ b/cmd/agent/agent.go @@ -21,21 +21,19 
@@ import ( "os" "sync" - grpccredentials "google.golang.org/grpc/credentials" - + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "github.com/tevino/abool" + "github.com/urfave/cli" + oldcontext "golang.org/x/net/context" "google.golang.org/grpc" + grpccredentials "google.golang.org/grpc/credentials" "google.golang.org/grpc/keepalive" "google.golang.org/grpc/metadata" "github.com/woodpecker-ci/woodpecker/agent" "github.com/woodpecker-ci/woodpecker/pipeline/backend/docker" "github.com/woodpecker-ci/woodpecker/pipeline/rpc" - - "github.com/rs/zerolog" - "github.com/rs/zerolog/log" - "github.com/tevino/abool" - "github.com/urfave/cli" - oldcontext "golang.org/x/net/context" ) func loop(c *cli.Context) error { diff --git a/cmd/agent/health.go b/cmd/agent/health.go index 282d222ab..eba638f7b 100644 --- a/cmd/agent/health.go +++ b/cmd/agent/health.go @@ -20,6 +20,7 @@ import ( "net/http" "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/agent" "github.com/woodpecker-ci/woodpecker/version" ) diff --git a/cmd/agent/main.go b/cmd/agent/main.go index 3a1c6e996..57c6c7832 100644 --- a/cmd/agent/main.go +++ b/cmd/agent/main.go @@ -18,10 +18,10 @@ import ( "fmt" "os" - "github.com/woodpecker-ci/woodpecker/version" - _ "github.com/joho/godotenv/autoload" "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/version" ) func main() { diff --git a/cmd/cli/main.go b/cmd/cli/main.go index d0f3adef4..cd1769c61 100644 --- a/cmd/cli/main.go +++ b/cmd/cli/main.go @@ -18,6 +18,9 @@ import ( "fmt" "os" + _ "github.com/joho/godotenv/autoload" + "github.com/urfave/cli" + "github.com/woodpecker-ci/woodpecker/cli/build" "github.com/woodpecker-ci/woodpecker/cli/deploy" "github.com/woodpecker-ci/woodpecker/cli/exec" @@ -29,9 +32,6 @@ import ( "github.com/woodpecker-ci/woodpecker/cli/secret" "github.com/woodpecker-ci/woodpecker/cli/user" "github.com/woodpecker-ci/woodpecker/version" - - _ "github.com/joho/godotenv/autoload" - "github.com/urfave/cli" ) func main() { diff --git a/cmd/server/flags.go b/cmd/server/flags.go index 1ef45ce5a..18d179a70 100644 --- a/cmd/server/flags.go +++ b/cmd/server/flags.go @@ -26,6 +26,16 @@ var flags = []cli.Flag{ Name: "debug", Usage: "enable server debug mode", }, + cli.BoolFlag{ + EnvVar: "WOODPECKER_DEBUG_PRETTY", + Name: "pretty", + Usage: "enable pretty-printed debug output", + }, + cli.BoolTFlag{ + EnvVar: "WOODPECKER_DEBUG_NOCOLOR", + Name: "nocolor", + Usage: "disable colored debug output", + }, cli.StringFlag{ EnvVar: "WOODPECKER_HOST", Name: "server-host", diff --git a/cmd/server/main.go b/cmd/server/main.go index 48dc33389..fc92e8aea 100644 --- a/cmd/server/main.go +++ b/cmd/server/main.go @@ -18,11 +18,11 @@ import ( "fmt" "os" - "github.com/woodpecker-ci/woodpecker/version" - "github.com/joho/godotenv" _ "github.com/joho/godotenv/autoload" "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/version" ) func main() { diff --git a/cmd/server/server.go b/cmd/server/server.go index 2d76a8ae9..3d3072461 100644 --- a/cmd/server/server.go +++ b/cmd/server/server.go @@ -27,13 +27,16 @@ import ( "strings" "time" + "github.com/rs/zerolog" + "github.com/rs/zerolog/log" + "github.com/urfave/cli" + "golang.org/x/crypto/acme/autocert" + oldcontext "golang.org/x/net/context" + "golang.org/x/sync/errgroup" "google.golang.org/grpc" "google.golang.org/grpc/keepalive" "google.golang.org/grpc/metadata" - "golang.org/x/crypto/acme/autocert" - "golang.org/x/sync/errgroup" - "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" 
"github.com/woodpecker-ci/woodpecker/server" woodpeckerGrpcServer "github.com/woodpecker-ci/woodpecker/server/grpc" @@ -43,12 +46,8 @@ import ( "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/router" "github.com/woodpecker-ci/woodpecker/server/router/middleware" + "github.com/woodpecker-ci/woodpecker/server/router/middleware/logger" "github.com/woodpecker-ci/woodpecker/server/store" - - "github.com/gin-gonic/contrib/ginrus" - "github.com/sirupsen/logrus" - "github.com/urfave/cli" - oldcontext "golang.org/x/net/context" ) func loop(c *cli.Context) error { @@ -56,37 +55,45 @@ func loop(c *cli.Context) error { // debug level if requested by user // TODO: format output & options to switch to json aka. option to add channels to send logs to if c.Bool("debug") { - logrus.SetReportCaller(true) - logrus.SetLevel(logrus.DebugLevel) + zerolog.SetGlobalLevel(zerolog.DebugLevel) } else { - logrus.SetLevel(logrus.WarnLevel) + zerolog.SetGlobalLevel(zerolog.WarnLevel) + } + + if c.Bool("pretty") { + log.Logger = log.Output( + zerolog.ConsoleWriter{ + Out: os.Stderr, + NoColor: c.BoolT("nocolor"), + }, + ) } if c.String("server-host") == "" { - logrus.Fatalln("WOODPECKER_HOST is not properly configured") + log.Fatal().Msg("WOODPECKER_HOST is not properly configured") } if !strings.Contains(c.String("server-host"), "://") { - logrus.Fatalln( + log.Fatal().Msg( "WOODPECKER_HOST must be :// format", ) } if strings.Contains(c.String("server-host"), "://localhost") { - logrus.Warningln( + log.Warn().Msg( "WOODPECKER_HOST should probably be publicly accessible (not localhost)", ) } if strings.HasSuffix(c.String("server-host"), "/") { - logrus.Fatalln( + log.Fatal().Msg( "WOODPECKER_HOST must not have trailing slash", ) } remote_, err := SetupRemote(c) if err != nil { - logrus.Fatal(err) + log.Fatal().Err(err).Msg("") } store_ := setupStore(c) @@ -116,7 +123,7 @@ func loop(c *cli.Context) error { // setup the server and start the listener handler := router.Load( webUIServe, - ginrus.Ginrus(logrus.StandardLogger(), time.RFC3339, true), + logger.Logger(time.RFC3339, true), middleware.Version, middleware.Config(c), middleware.Store(c, store_), @@ -130,7 +137,7 @@ func loop(c *cli.Context) error { lis, err := net.Listen("tcp", c.String("grpc-addr")) if err != nil { - logrus.Error(err) + log.Err(err).Msg("") return err } auther := &authorizer{ @@ -155,7 +162,7 @@ func loop(c *cli.Context) error { err = grpcServer.Serve(lis) if err != nil { - logrus.Error(err) + log.Err(err).Msg("") return err } return nil diff --git a/cmd/server/setup.go b/cmd/server/setup.go index 9b9e4c873..7c53f4e53 100644 --- a/cmd/server/setup.go +++ b/cmd/server/setup.go @@ -19,6 +19,12 @@ import ( "time" "github.com/gin-gonic/gin" + "github.com/prometheus/client_golang/prometheus" + "github.com/prometheus/client_golang/prometheus/promauto" + "github.com/rs/zerolog/log" + "github.com/urfave/cli" + "golang.org/x/sync/errgroup" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/plugins/environments" @@ -36,12 +42,6 @@ import ( "github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/server/store/datastore" "github.com/woodpecker-ci/woodpecker/server/web" - - "github.com/prometheus/client_golang/prometheus" - "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/sirupsen/logrus" - "github.com/urfave/cli" - "golang.org/x/sync/errgroup" ) func setupStore(c 
*cli.Context) store.Store { @@ -128,7 +128,7 @@ func setupGitea(c *cli.Context) (remote.Remote, error) { SkipVerify: c.Bool("gitea-skip-verify"), } if len(opts.URL) == 0 { - logrus.Fatalln("WOODPECKER_GITEA_URL must be set") + log.Fatal().Msg("WOODPECKER_GITEA_URL must be set") } return gitea.New(opts) } diff --git a/go.mod b/go.mod index c288cbe18..60144a3f4 100644 --- a/go.mod +++ b/go.mod @@ -19,7 +19,6 @@ require ( github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568 // indirect github.com/franela/goblin v0.0.0-20200512143142-b260c999b2d7 github.com/ghodss/yaml v1.0.0 - github.com/gin-gonic/contrib v0.0.0-20191209060500-d6e26eeaa607 github.com/gin-gonic/gin v1.7.4 github.com/go-playground/validator/v10 v10.9.0 // indirect github.com/go-sql-driver/mysql v1.6.0 @@ -43,7 +42,6 @@ require ( github.com/prometheus/client_golang v1.7.1 github.com/rs/zerolog v1.25.0 github.com/russross/meddler v1.0.1 - github.com/sirupsen/logrus v1.8.1 github.com/stretchr/objx v0.3.0 // indirect github.com/stretchr/testify v1.7.0 github.com/tevino/abool v0.0.0-20170917061928-9b9efcf221b5 diff --git a/go.sum b/go.sum index d5912ae91..de9e0ec9f 100644 --- a/go.sum +++ b/go.sum @@ -283,8 +283,6 @@ github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE= github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI= -github.com/gin-gonic/contrib v0.0.0-20191209060500-d6e26eeaa607 h1:MrIm8EEPue08JS4eh+b08IOG+wd0WRWEHWnewNfWFX0= -github.com/gin-gonic/contrib v0.0.0-20191209060500-d6e26eeaa607/go.mod h1:iqneQ2Df3omzIVTkIfn7c1acsVnMGiSLn4XF5Blh3Yg= github.com/gin-gonic/gin v1.7.4 h1:QmUZXrvJ9qZ3GfWvQ+2wnW/1ePrTEJqPKMYEU3lD/DM= github.com/gin-gonic/gin v1.7.4/go.mod h1:jD2toBW3GZUr5UMcdrwQA10I7RuaFOl/SGeDjXkfUtY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= diff --git a/pipeline/backend/docker/pool.go b/pipeline/backend/docker/pool.go index 8160bc114..31ab40e4d 100644 --- a/pipeline/backend/docker/pool.go +++ b/pipeline/backend/docker/pool.go @@ -1,9 +1,7 @@ package docker // import ( -// "context" -// -// "github.com/woodpecker-ci/woodpecker/pipeline/backend" + // ) // // // Pool manages a pool of Docker clients. 
diff --git a/pipeline/frontend/yaml/compiler/cacher.go b/pipeline/frontend/yaml/compiler/cacher.go index 5b9cd449a..15c3122d7 100644 --- a/pipeline/frontend/yaml/compiler/cacher.go +++ b/pipeline/frontend/yaml/compiler/cacher.go @@ -4,9 +4,9 @@ import ( "path" "strings" - "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml" - libcompose "github.com/docker/libcompose/yaml" + + "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml" ) // Cacher defines a compiler transform that can be used diff --git a/pipeline/frontend/yaml/compiler/convert.go b/pipeline/frontend/yaml/compiler/convert.go index 115eee5f6..592a72af7 100644 --- a/pipeline/frontend/yaml/compiler/convert.go +++ b/pipeline/frontend/yaml/compiler/convert.go @@ -25,7 +25,7 @@ func (c *Compiler) createProcess(name string, container *yaml.Container, section ) networks := []backend.Conn{ - backend.Conn{ + { Name: fmt.Sprintf("%s_default", c.prefix), Aliases: []string{container.Name}, }, diff --git a/pipeline/frontend/yaml/constraint.go b/pipeline/frontend/yaml/constraint.go index 41f04b207..ac7150755 100644 --- a/pipeline/frontend/yaml/constraint.go +++ b/pipeline/frontend/yaml/constraint.go @@ -7,9 +7,10 @@ import ( "github.com/bmatcuk/doublestar/v4" libcompose "github.com/docker/libcompose/yaml" + "gopkg.in/yaml.v3" + "github.com/woodpecker-ci/woodpecker/pipeline/frontend" "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml/types" - "gopkg.in/yaml.v3" ) type ( diff --git a/pipeline/frontend/yaml/constraint_test.go b/pipeline/frontend/yaml/constraint_test.go index 5c5a01153..09e97b76b 100644 --- a/pipeline/frontend/yaml/constraint_test.go +++ b/pipeline/frontend/yaml/constraint_test.go @@ -3,9 +3,9 @@ package yaml import ( "testing" - "github.com/woodpecker-ci/woodpecker/pipeline/frontend" - "gopkg.in/yaml.v3" + + "github.com/woodpecker-ci/woodpecker/pipeline/frontend" ) func TestConstraint(t *testing.T) { diff --git a/pipeline/multipart/coverage/coverage.go b/pipeline/multipart/coverage/coverage.go index d5c73bd34..b7e93d367 100644 --- a/pipeline/multipart/coverage/coverage.go +++ b/pipeline/multipart/coverage/coverage.go @@ -3,10 +3,9 @@ package coverage import ( "encoding/json" "fmt" - "strconv" - "mime/multipart" "net/textproto" + "strconv" ) // MimeType used by coverage reports. 
diff --git a/pipeline/rpc/client_grpc.go b/pipeline/rpc/client_grpc.go index 74e0e9790..a9440d794 100644 --- a/pipeline/rpc/client_grpc.go +++ b/pipeline/rpc/client_grpc.go @@ -3,15 +3,15 @@ package rpc import ( "context" "encoding/json" - "log" "time" - "github.com/woodpecker-ci/woodpecker/pipeline/backend" - "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" - + "github.com/rs/zerolog/log" "google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" + + "github.com/woodpecker-ci/woodpecker/pipeline/backend" + "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" ) var backoff = time.Second @@ -46,7 +46,7 @@ func (c *client) Next(ctx context.Context, f Filter) (*Pipeline, error) { if err == nil { break } else { - log.Printf("grpc error: done(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: done(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -86,7 +86,7 @@ func (c *client) Wait(ctx context.Context, id string) (err error) { if err == nil { break } else { - log.Printf("grpc error: wait(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: wait(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -120,7 +120,7 @@ func (c *client) Init(ctx context.Context, id string, state State) (err error) { if err == nil { break } else { - log.Printf("grpc error: init(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: init(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -154,7 +154,7 @@ func (c *client) Done(ctx context.Context, id string, state State) (err error) { if err == nil { break } else { - log.Printf("grpc error: done(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: done(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -181,7 +181,7 @@ func (c *client) Extend(ctx context.Context, id string) (err error) { if err == nil { break } else { - log.Printf("grpc error: extend(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: extend(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -215,7 +215,7 @@ func (c *client) Update(ctx context.Context, id string, state State) (err error) if err == nil { break } else { - log.Printf("grpc error: update(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: update(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -250,7 +250,7 @@ func (c *client) Upload(ctx context.Context, id string, file *File) (err error) if err == nil { break } else { - log.Printf("grpc error: upload(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: upload(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case @@ -282,7 +282,7 @@ func (c *client) Log(ctx context.Context, id string, line *Line) (err error) { if err == nil { break } else { - log.Printf("grpc error: log(): code: %v: %s", status.Code(err), err) + log.Err(err).Msgf("grpc error: log(): code: %v: %s", status.Code(err), err) } switch status.Code(err) { case diff --git a/pipeline/rpc/client_grpc_health.go b/pipeline/rpc/client_grpc_health.go index 7bf2f6855..b14eeb250 100644 --- a/pipeline/rpc/client_grpc_health.go +++ b/pipeline/rpc/client_grpc_health.go @@ -2,15 +2,13 @@ package rpc import ( "context" - // "encoding/json" "time" - // "github.com/woodpecker-ci/woodpecker/pipeline/backend" - "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" - 
"google.golang.org/grpc" "google.golang.org/grpc/codes" "google.golang.org/grpc/status" + + "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" ) // generate protobuffs diff --git a/pipeline/rpc/proto/woodpecker.pb.go b/pipeline/rpc/proto/woodpecker.pb.go index 391b13ef8..33fefc451 100644 --- a/pipeline/rpc/proto/woodpecker.pb.go +++ b/pipeline/rpc/proto/woodpecker.pb.go @@ -1,7 +1,7 @@ // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.27.1 -// protoc v3.17.3 +// protoc v3.18.1 // source: woodpecker.proto package proto diff --git a/pipeline/rpc/proto/woodpecker_grpc.pb.go b/pipeline/rpc/proto/woodpecker_grpc.pb.go index 83c69d4d5..c853f2b66 100644 --- a/pipeline/rpc/proto/woodpecker_grpc.pb.go +++ b/pipeline/rpc/proto/woodpecker_grpc.pb.go @@ -1,4 +1,8 @@ // Code generated by protoc-gen-go-grpc. DO NOT EDIT. +// versions: +// - protoc-gen-go-grpc v1.1.0 +// - protoc v3.18.1 +// source: woodpecker.proto package proto diff --git a/pipeline/schema/schema.go b/pipeline/schema/schema.go index 53dd85344..198afdc61 100644 --- a/pipeline/schema/schema.go +++ b/pipeline/schema/schema.go @@ -4,8 +4,9 @@ import ( _ "embed" "fmt" - "github.com/woodpecker-ci/woodpecker/shared/yml" "github.com/xeipuuv/gojsonschema" + + "github.com/woodpecker-ci/woodpecker/shared/yml" ) //go:embed schema.json diff --git a/pipeline/schema/schema_test.go b/pipeline/schema/schema_test.go index 3b93bb222..ce6ba49ba 100644 --- a/pipeline/schema/schema_test.go +++ b/pipeline/schema/schema_test.go @@ -4,9 +4,9 @@ import ( "fmt" "testing" - "github.com/woodpecker-ci/woodpecker/pipeline/schema" - "github.com/stretchr/testify/assert" + + "github.com/woodpecker-ci/woodpecker/pipeline/schema" ) func TestSchema(t *testing.T) { diff --git a/server/api/badge.go b/server/api/badge.go index fa7a3fc82..8a89161bc 100644 --- a/server/api/badge.go +++ b/server/api/badge.go @@ -20,8 +20,9 @@ package api import ( "fmt" + "github.com/rs/zerolog/log" + "github.com/gin-gonic/gin" - log "github.com/sirupsen/logrus" "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" @@ -60,7 +61,7 @@ func GetBadge(c *gin.Context) { build, err := store.GetBuildLast(c, repo, branch) if err != nil { - log.Warning(err) + log.Warn().Err(err).Msg("") c.String(200, badgeNone) return } diff --git a/server/api/build.go b/server/api/build.go index 848bd6ce0..27e4713f3 100644 --- a/server/api/build.go +++ b/server/api/build.go @@ -22,13 +22,13 @@ import ( "context" "fmt" "io" - "log" "net/http" "strconv" "time" "github.com/gin-gonic/gin" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/queue" @@ -208,11 +208,11 @@ func DeleteBuild(c *gin.Context) { if proc.State == model.StatusPending { if proc.PPID != 0 { if _, err = shared.UpdateProcToStatusSkipped(store.FromContext(c), *proc, 0); err != nil { - log.Printf("error: done: cannot update proc_id %d state: %s", proc.ID, err) + log.Error().Msgf("error: done: cannot update proc_id %d state: %s", proc.ID, err) } } else { if _, err = shared.UpdateProcToStatusKilled(store.FromContext(c), *proc); err != nil { - log.Printf("error: done: cannot update proc_id %d state: %s", proc.ID, err) + log.Error().Msgf("error: done: cannot update proc_id %d state: %s", proc.ID, err) } } } @@ -262,7 +262,7 @@ func PostApproval(c *gin.Context) { // fetch the build file from the database configs, err := 
server.Config.Storage.Config.ConfigsForBuild(build.ID) if err != nil { - logrus.Errorf("failure to get build config for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to get build config for %s. %s", repo.FullName, err) c.AbortWithError(404, err) return } @@ -285,11 +285,11 @@ func PostApproval(c *gin.Context) { last, _ := store.GetBuildLastBefore(c, repo, build.Branch, build.ID) secs, err := server.Config.Services.Secrets.SecretListBuild(repo, build) if err != nil { - logrus.Debugf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) } regs, err := server.Config.Services.Registries.RegistryList(repo) if err != nil { - logrus.Debugf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) } envs := map[string]string{} if server.Config.Services.Environ != nil { @@ -318,7 +318,7 @@ func PostApproval(c *gin.Context) { buildItems, err := b.Build() if err != nil { if _, err = shared.UpdateToStatusError(store.FromContext(c), *build, err); err != nil { - logrus.Errorf("Error setting error status of build for %s#%d. %s", repo.FullName, build.Number, err) + log.Error().Msgf("Error setting error status of build for %s#%d. %s", repo.FullName, build.Number, err) } return } @@ -326,7 +326,7 @@ func PostApproval(c *gin.Context) { err = store.FromContext(c).ProcCreate(build.Procs) if err != nil { - logrus.Errorf("error persisting procs %s/%d: %s", repo.FullName, build.Number, err) + log.Error().Msgf("error persisting procs %s/%d: %s", repo.FullName, build.Number, err) } defer func() { @@ -338,7 +338,7 @@ func PostApproval(c *gin.Context) { err = remote_.Status(c, user, repo, build, uri, nil) } if err != nil { - logrus.Errorf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) + log.Error().Msgf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) } } }() @@ -375,7 +375,7 @@ func PostDecline(c *gin.Context) { uri := fmt.Sprintf("%s/%s/%d", server.Config.Server.Host, repo.FullName, build.Number) err = remote_.Status(c, user, repo, build, uri, nil) if err != nil { - logrus.Errorf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) + log.Error().Msgf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) } c.JSON(200, build) @@ -403,14 +403,14 @@ func PostBuild(c *gin.Context) { user, err := store.GetUser(c, repo.UserID) if err != nil { - logrus.Errorf("failure to find repo owner %s. %s", repo.FullName, err) + log.Error().Msgf("failure to find repo owner %s. %s", repo.FullName, err) c.AbortWithError(500, err) return } build, err := store.GetBuildNumber(c, repo, num) if err != nil { - logrus.Errorf("failure to get build %d. %s", num, err) + log.Error().Msgf("failure to get build %d. %s", num, err) c.AbortWithError(404, err) return } @@ -435,14 +435,14 @@ func PostBuild(c *gin.Context) { // fetch the pipeline config from database configs, err := server.Config.Storage.Config.ConfigsForBuild(build.ID) if err != nil { - logrus.Errorf("failure to get build config for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to get build config for %s. %s", repo.FullName, err) c.AbortWithError(404, err) return } netrc, err := remote_.Netrc(user, repo) if err != nil { - logrus.Errorf("failure to generate netrc for %s. 
%s", repo.FullName, err) + log.Error().Msgf("failure to generate netrc for %s. %s", repo.FullName, err) c.AbortWithError(500, err) return } @@ -473,7 +473,7 @@ func PostBuild(c *gin.Context) { err = persistBuildConfigs(configs, build.ID) if err != nil { - logrus.Errorf("failure to persist build config for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to persist build config for %s. %s", repo.FullName, err) c.AbortWithError(500, err) return } @@ -495,11 +495,11 @@ func PostBuild(c *gin.Context) { last, _ := store.GetBuildLastBefore(c, repo, build.Branch, build.ID) secs, err := server.Config.Services.Secrets.SecretListBuild(repo, build) if err != nil { - logrus.Debugf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) } regs, err := server.Config.Services.Registries.RegistryList(repo) if err != nil { - logrus.Debugf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) } if server.Config.Services.Environ != nil { globals, _ := server.Config.Services.Environ.EnvironList(repo) @@ -537,7 +537,7 @@ func PostBuild(c *gin.Context) { err = store.FromContext(c).ProcCreate(build.Procs) if err != nil { - logrus.Errorf("cannot restart %s#%d: %s", repo.FullName, build.Number, err) + log.Error().Msgf("cannot restart %s#%d: %s", repo.FullName, build.Number, err) build.Status = model.StatusError build.Started = time.Now().Unix() build.Finished = build.Started diff --git a/server/api/file.go b/server/api/file.go index 9bb4aa190..e5cf0020a 100644 --- a/server/api/file.go +++ b/server/api/file.go @@ -21,6 +21,7 @@ import ( "strings" "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/router/middleware/session" "github.com/woodpecker-ci/woodpecker/server/store" ) diff --git a/server/api/hook.go b/server/api/hook.go index 1ed03819f..ac1dda230 100644 --- a/server/api/hook.go +++ b/server/api/hook.go @@ -29,19 +29,18 @@ import ( "time" "github.com/gin-gonic/gin" - - "github.com/sirupsen/logrus" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/shared/token" + "github.com/rs/zerolog/log" "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml" "github.com/woodpecker-ci/woodpecker/pipeline/rpc" "github.com/woodpecker-ci/woodpecker/server" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/pubsub" "github.com/woodpecker-ci/woodpecker/server/queue" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/shared" "github.com/woodpecker-ci/woodpecker/server/store" + "github.com/woodpecker-ci/woodpecker/shared/token" ) var skipRe = regexp.MustCompile(`\[(?i:ci *skip|skip *ci)\]`) @@ -81,7 +80,7 @@ func PostHook(c *gin.Context) { tmpRepo, build, err := remote_.Hook(c.Request) if err != nil { - logrus.Errorf("failure to parse hook. %s", err) + log.Error().Msgf("failure to parse hook. 
%s", err) c.AbortWithError(400, err) return } @@ -90,7 +89,7 @@ func PostHook(c *gin.Context) { return } if tmpRepo == nil { - logrus.Errorf("failure to ascertain repo from hook.") + log.Error().Msgf("failure to ascertain repo from hook.") c.Writer.WriteHeader(400) return } @@ -99,19 +98,19 @@ func PostHook(c *gin.Context) { // wrapped in square brackets appear in the commit message skipMatch := skipRe.FindString(build.Message) if len(skipMatch) > 0 { - logrus.Infof("ignoring hook. %s found in %s", skipMatch, build.Commit) + log.Info().Msgf("ignoring hook. %s found in %s", skipMatch, build.Commit) c.Writer.WriteHeader(204) return } repo, err := store.GetRepoOwnerName(c, tmpRepo.Owner, tmpRepo.Name) if err != nil { - logrus.Errorf("failure to find repo %s/%s from hook. %s", tmpRepo.Owner, tmpRepo.Name, err) + log.Error().Msgf("failure to find repo %s/%s from hook. %s", tmpRepo.Owner, tmpRepo.Name, err) c.AbortWithError(404, err) return } if !repo.IsActive { - logrus.Errorf("ignoring hook. %s/%s is inactive.", tmpRepo.Owner, tmpRepo.Name) + log.Error().Msgf("ignoring hook. %s/%s is inactive.", tmpRepo.Owner, tmpRepo.Name) c.AbortWithError(204, err) return } @@ -121,24 +120,24 @@ func PostHook(c *gin.Context) { return repo.Hash, nil }) if err != nil { - logrus.Errorf("failure to parse token from hook for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to parse token from hook for %s. %s", repo.FullName, err) c.AbortWithError(400, err) return } if parsed.Text != repo.FullName { - logrus.Errorf("failure to verify token from hook. Expected %s, got %s", repo.FullName, parsed.Text) + log.Error().Msgf("failure to verify token from hook. Expected %s, got %s", repo.FullName, parsed.Text) c.AbortWithStatus(403) return } if repo.UserID == 0 { - logrus.Warnf("ignoring hook. repo %s has no owner.", repo.FullName) + log.Warn().Msgf("ignoring hook. repo %s has no owner.", repo.FullName) c.Writer.WriteHeader(204) return } if build.Event == model.EventPull && !repo.AllowPull { - logrus.Infof("ignoring hook. repo %s is disabled for pull requests.", repo.FullName) + log.Info().Msgf("ignoring hook. repo %s is disabled for pull requests.", repo.FullName) c.Writer.Write([]byte("pulls are disabled on woodpecker for this repo")) c.Writer.WriteHeader(204) return @@ -146,7 +145,7 @@ func PostHook(c *gin.Context) { user, err := store.GetUser(c, repo.UserID) if err != nil { - logrus.Errorf("failure to find repo owner %s. %s", repo.FullName, err) + log.Error().Msgf("failure to find repo owner %s. 
%s", repo.FullName, err) c.AbortWithError(500, err) return } @@ -157,10 +156,10 @@ func PostHook(c *gin.Context) { if refresher, ok := remote_.(remote.Refresher); ok { ok, err := refresher.Refresh(c, user) if err != nil { - logrus.Errorf("failed to refresh oauth2 token: %s", err) + log.Error().Msgf("failed to refresh oauth2 token: %s", err) } else if ok { if err := store.UpdateUser(c, user); err != nil { - logrus.Errorf("error while updating user: %s", err) + log.Error().Msgf("error while updating user: %s", err) // move forward } } @@ -170,14 +169,14 @@ func PostHook(c *gin.Context) { configFetcher := shared.NewConfigFetcher(remote_, user, repo, build) remoteYamlConfigs, err := configFetcher.Fetch(c) if err != nil { - logrus.Errorf("error: %s: cannot find %s in %s: %s", repo.FullName, repo.Config, build.Ref, err) + log.Error().Msgf("error: %s: cannot find %s in %s: %s", repo.FullName, repo.Config, build.Ref, err) c.AbortWithError(404, err) return } filtered, err := branchFiltered(build, remoteYamlConfigs) if err != nil { - logrus.Errorf("failure to parse yaml from hook for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to parse yaml from hook for %s. %s", repo.FullName, err) c.AbortWithError(400, err) } if filtered { @@ -201,7 +200,7 @@ func PostHook(c *gin.Context) { err = store.CreateBuild(c, build, build.Procs...) if err != nil { - logrus.Errorf("failure to save commit for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to save commit for %s. %s", repo.FullName, err) c.AbortWithError(500, err) return } @@ -210,7 +209,7 @@ func PostHook(c *gin.Context) { for _, remoteYamlConfig := range remoteYamlConfigs { _, err := findOrPersistPipelineConfig(repo, build, remoteYamlConfig) if err != nil { - logrus.Errorf("failure to find or persist build config for %s. %s", repo.FullName, err) + log.Error().Msgf("failure to find or persist build config for %s. %s", repo.FullName, err) c.AbortWithError(500, err) return } @@ -238,12 +237,12 @@ func PostHook(c *gin.Context) { secs, err := server.Config.Services.Secrets.SecretListBuild(repo, build) if err != nil { - logrus.Debugf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err) } regs, err := server.Config.Services.Registries.RegistryList(repo) if err != nil { - logrus.Debugf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) + log.Debug().Msgf("Error getting registry credentials for %s#%d. %s", repo.FullName, build.Number, err) } // get the previous build so that we can send status change notifications @@ -263,7 +262,7 @@ func PostHook(c *gin.Context) { buildItems, err := b.Build() if err != nil { if _, err = shared.UpdateToStatusError(store.FromContext(c), *build, err); err != nil { - logrus.Errorf("Error setting error status of build for %s#%d. %s", repo.FullName, build.Number, err) + log.Error().Msgf("Error setting error status of build for %s#%d. 
%s", repo.FullName, build.Number, err) } return } @@ -271,7 +270,7 @@ func PostHook(c *gin.Context) { err = store.FromContext(c).ProcCreate(build.Procs) if err != nil { - logrus.Errorf("error persisting procs %s/%d: %s", repo.FullName, build.Number, err) + log.Error().Msgf("error persisting procs %s/%d: %s", repo.FullName, build.Number, err) } defer func() { @@ -283,7 +282,7 @@ func PostHook(c *gin.Context) { err = remote_.Status(c, user, repo, build, uri, nil) } if err != nil { - logrus.Errorf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) + log.Error().Msgf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) } } }() @@ -294,14 +293,14 @@ func PostHook(c *gin.Context) { // TODO: parse yaml once and not for each filter function func branchFiltered(build *model.Build, remoteYamlConfigs []*remote.FileMeta) (bool, error) { - logrus.Tracef("hook.branchFiltered(): build branch: '%s' build event: '%s' config count: %d", build.Branch, build.Event, len(remoteYamlConfigs)) + log.Trace().Msgf("hook.branchFiltered(): build branch: '%s' build event: '%s' config count: %d", build.Branch, build.Event, len(remoteYamlConfigs)) for _, remoteYamlConfig := range remoteYamlConfigs { parsedPipelineConfig, err := yaml.ParseString(string(remoteYamlConfig.Data)) if err != nil { - logrus.Tracef("parse config '%s': %s", remoteYamlConfig.Name, err) + log.Trace().Msgf("parse config '%s': %s", remoteYamlConfig.Name, err) return false, err } - logrus.Tracef("config '%s': %#v", remoteYamlConfig.Name, parsedPipelineConfig) + log.Trace().Msgf("config '%s': %#v", remoteYamlConfig.Name, parsedPipelineConfig) if !parsedPipelineConfig.Branches.Match(build.Branch) && build.Event != model.EventTag && build.Event != model.EventDeploy { } else { diff --git a/server/api/login.go b/server/api/login.go index 952b5bffd..f174d9900 100644 --- a/server/api/login.go +++ b/server/api/login.go @@ -19,16 +19,16 @@ import ( "net/http" "time" + "github.com/gin-gonic/gin" "github.com/gorilla/securecookie" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/shared/httputil" "github.com/woodpecker-ci/woodpecker/shared/token" - - "github.com/gin-gonic/gin" - "github.com/sirupsen/logrus" ) func HandleLogin(c *gin.Context) { @@ -56,7 +56,7 @@ func HandleAuth(c *gin.Context) { tmpuser, err := remote.Login(c, c.Writer, c.Request) if err != nil { - logrus.Errorf("cannot authenticate user. %s", err) + log.Error().Msgf("cannot authenticate user. %s", err) c.Redirect(303, "/login?error=oauth_error") return } @@ -73,7 +73,7 @@ func HandleAuth(c *gin.Context) { // if self-registration is disabled we should return a not authorized error if !config.Open && !config.IsAdmin(tmpuser) { - logrus.Errorf("cannot register %s. registration closed", tmpuser.Login) + log.Error().Msgf("cannot register %s. 
registration closed", tmpuser.Login) c.Redirect(303, "/login?error=access_denied") return } @@ -83,7 +83,7 @@ func HandleAuth(c *gin.Context) { if len(config.Orgs) != 0 { teams, terr := remote.Teams(c, tmpuser) if terr != nil || config.IsMember(teams) == false { - logrus.Errorf("cannot verify team membership for %s.", u.Login) + log.Error().Msgf("cannot verify team membership for %s.", u.Login) c.Redirect(303, "/login?error=access_denied") return } @@ -103,7 +103,7 @@ func HandleAuth(c *gin.Context) { // insert the user into the database if err := store.CreateUser(c, u); err != nil { - logrus.Errorf("cannot insert %s. %s", u.Login, err) + log.Error().Msgf("cannot insert %s. %s", u.Login, err) c.Redirect(303, "/login?error=internal_error") return } @@ -120,14 +120,14 @@ func HandleAuth(c *gin.Context) { if len(config.Orgs) != 0 { teams, terr := remote.Teams(c, u) if terr != nil || config.IsMember(teams) == false { - logrus.Errorf("cannot verify team membership for %s.", u.Login) + log.Error().Msgf("cannot verify team membership for %s.", u.Login) c.Redirect(303, "/login?error=access_denied") return } } if err := store.UpdateUser(c, u); err != nil { - logrus.Errorf("cannot update %s. %s", u.Login, err) + log.Error().Msgf("cannot update %s. %s", u.Login, err) c.Redirect(303, "/login?error=internal_error") return } @@ -135,7 +135,7 @@ func HandleAuth(c *gin.Context) { exp := time.Now().Add(server.Config.Server.SessionExpires).Unix() tokenString, err := token.New(token.SessToken, u.Login).SignExpires(u.Hash, exp) if err != nil { - logrus.Errorf("cannot create token for %s. %s", u.Login, err) + log.Error().Msgf("cannot create token for %s. %s", u.Login, err) c.Redirect(303, "/login?error=internal_error") return } diff --git a/server/api/metrics/prometheus.go b/server/api/metrics/prometheus.go index 3a23eec5a..1397e8f38 100644 --- a/server/api/metrics/prometheus.go +++ b/server/api/metrics/prometheus.go @@ -20,6 +20,7 @@ import ( "github.com/gin-gonic/gin" "github.com/prometheus/client_golang/prometheus/promhttp" + "github.com/woodpecker-ci/woodpecker/server" ) diff --git a/server/api/registry.go b/server/api/registry.go index 0c48a1af3..61bd7cdef 100644 --- a/server/api/registry.go +++ b/server/api/registry.go @@ -17,11 +17,11 @@ package api import ( "net/http" + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/router/middleware/session" - - "github.com/gin-gonic/gin" ) // GetRegistry gets the name registry from the database and writes diff --git a/server/api/secret.go b/server/api/secret.go index 2b9fcfe1c..4ff3a9980 100644 --- a/server/api/secret.go +++ b/server/api/secret.go @@ -17,11 +17,11 @@ package api import ( "net/http" + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/router/middleware/session" - - "github.com/gin-gonic/gin" ) // GetSecret gets the named secret from the database and writes diff --git a/server/api/stream.go b/server/api/stream.go index 42509f866..643d00036 100644 --- a/server/api/stream.go +++ b/server/api/stream.go @@ -22,15 +22,15 @@ import ( "strconv" "time" + "github.com/gin-gonic/gin" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/logging" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/pubsub" 
"github.com/woodpecker-ci/woodpecker/server/router/middleware/session" "github.com/woodpecker-ci/woodpecker/server/store" - - "github.com/gin-gonic/gin" - "github.com/sirupsen/logrus" ) // @@ -55,7 +55,7 @@ func EventStreamSSE(c *gin.Context) { io.WriteString(rw, ": ping\n\n") flusher.Flush() - logrus.Debugf("user feed: connection opened") + log.Debug().Msg("user feed: connection opened") user := session.User(c) repo := map[string]bool{} @@ -74,7 +74,7 @@ func EventStreamSSE(c *gin.Context) { defer func() { cancel() close(eventc) - logrus.Debugf("user feed: connection closed") + log.Debug().Msg("user feed: connection closed") }() go func() { @@ -159,18 +159,18 @@ func LogStreamSSE(c *gin.Context) { build, err := store.GetBuildNumber(c, repo, buildn) if err != nil { - logrus.Debugln("stream cannot get build number.", err) + log.Debug().Msgf("stream cannot get build number: %v", err) io.WriteString(rw, "event: error\ndata: build not found\n\n") return } proc, err := store.FromContext(c).ProcFind(build, jobn) if err != nil { - logrus.Debugln("stream cannot get proc number.", err) + log.Debug().Msgf("stream cannot get proc number: %v", err) io.WriteString(rw, "event: error\ndata: process not found\n\n") return } if proc.State != model.StatusRunning { - logrus.Debugln("stream not found.") + log.Debug().Msg("stream not found.") io.WriteString(rw, "event: error\ndata: stream not found\n\n") return } @@ -180,12 +180,12 @@ func LogStreamSSE(c *gin.Context) { context.Background(), ) - logrus.Debugf("log stream: connection opened") + log.Debug().Msgf("log stream: connection opened") defer func() { cancel() close(logc) - logrus.Debugf("log stream: connection closed") + log.Debug().Msgf("log stream: connection closed") }() go func() { @@ -214,7 +214,7 @@ func LogStreamSSE(c *gin.Context) { c.Request.Header.Get("Last-Event-ID"), ) if last != 0 { - logrus.Debugf("log stream: reconnect: last-event-id: %d", last) + log.Debug().Msgf("log stream: reconnect: last-event-id: %d", last) } // retry: 10000\n diff --git a/server/api/user.go b/server/api/user.go index 15e9ce5a3..d7c377b46 100644 --- a/server/api/user.go +++ b/server/api/user.go @@ -22,7 +22,7 @@ import ( "github.com/gin-gonic/gin" "github.com/gorilla/securecookie" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" @@ -41,7 +41,7 @@ func GetFeed(c *gin.Context) { latest, _ := strconv.ParseBool(c.Query("latest")) if time.Unix(user.Synced, 0).Add(time.Hour * 72).Before(time.Now()) { - logrus.Debugf("sync begin: %s", user.Login) + log.Debug().Msgf("sync begin: %s", user.Login) user.Synced = time.Now().Unix() store.FromContext(c).UpdateUser(user) @@ -55,9 +55,9 @@ func GetFeed(c *gin.Context) { Match: shared.NamespaceFilter(config.OwnersWhitelist), } if err := sync.Sync(c, user); err != nil { - logrus.Debugf("sync error: %s: %s", user.Login, err) + log.Debug().Msgf("sync error: %s: %s", user.Login, err) } else { - logrus.Debugf("sync complete: %s", user.Login) + log.Debug().Msgf("sync complete: %s", user.Login) } } @@ -87,7 +87,7 @@ func GetRepos(c *gin.Context) { ) if flush || time.Unix(user.Synced, 0).Add(time.Hour*72).Before(time.Now()) { - logrus.Debugf("sync begin: %s", user.Login) + log.Debug().Msgf("sync begin: %s", user.Login) user.Synced = time.Now().Unix() store.FromContext(c).UpdateUser(user) @@ -101,9 +101,9 @@ func GetRepos(c *gin.Context) { } if err := sync.Sync(c, user); err != nil { - logrus.Debugf("sync error: %s: %s", 
user.Login, err) + log.Debug().Msgf("sync error: %s: %s", user.Login, err) } else { - logrus.Debugf("sync complete: %s", user.Login) + log.Debug().Msgf("sync complete: %s", user.Login) } } diff --git a/server/api/z.go b/server/api/z.go index 8e8aaf004..8c71f5404 100644 --- a/server/api/z.go +++ b/server/api/z.go @@ -15,10 +15,10 @@ package api import ( + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/version" - - "github.com/gin-gonic/gin" ) // Health endpoint returns a 500 if the server state is unhealthy. diff --git a/server/grpc/rpc.go b/server/grpc/rpc.go index b6cd9a843..0cb2ff9b7 100644 --- a/server/grpc/rpc.go +++ b/server/grpc/rpc.go @@ -22,29 +22,26 @@ import ( "context" "encoding/json" "fmt" - "log" "strconv" - oldcontext "golang.org/x/net/context" - - grpcMetadata "google.golang.org/grpc/metadata" + "github.com/rs/zerolog/log" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promauto" - "github.com/sirupsen/logrus" + oldcontext "golang.org/x/net/context" + grpcMetadata "google.golang.org/grpc/metadata" + + "github.com/woodpecker-ci/expr" "github.com/woodpecker-ci/woodpecker/pipeline/rpc" "github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto" "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/logging" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/pubsub" "github.com/woodpecker-ci/woodpecker/server/queue" - "github.com/woodpecker-ci/woodpecker/server/shared" - - "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" + "github.com/woodpecker-ci/woodpecker/server/shared" "github.com/woodpecker-ci/woodpecker/server/store" - - "github.com/woodpecker-ci/expr" ) type RPC struct { @@ -64,7 +61,7 @@ func (s *RPC) Next(c context.Context, filter rpc.Filter) (*rpc.Pipeline, error) if ok { hostname, ok := metadata["hostname"] if ok && len(hostname) != 0 { - logrus.Debugf("agent connected: %s: polling", hostname[0]) + log.Debug().Msgf("agent connected: %s: polling", hostname[0]) } } @@ -109,19 +106,19 @@ func (s *RPC) Update(c context.Context, id string, state rpc.State) error { pproc, err := s.store.ProcLoad(procID) if err != nil { - log.Printf("error: rpc.update: cannot find pproc with id %d: %s", procID, err) + log.Error().Msgf("error: rpc.update: cannot find pproc with id %d: %s", procID, err) return err } build, err := s.store.GetBuild(pproc.BuildID) if err != nil { - log.Printf("error: cannot find build with id %d: %s", pproc.BuildID, err) + log.Error().Msgf("error: cannot find build with id %d: %s", pproc.BuildID, err) return err } proc, err := s.store.ProcChild(build, pproc.PID, state.Proc) if err != nil { - log.Printf("error: cannot find proc with name %s: %s", state.Proc, err) + log.Error().Msgf("error: cannot find proc with name %s: %s", state.Proc, err) return err } @@ -135,12 +132,12 @@ func (s *RPC) Update(c context.Context, id string, state rpc.State) error { repo, err := s.store.GetRepo(build.RepoID) if err != nil { - log.Printf("error: cannot find repo with id %d: %s", build.RepoID, err) + log.Error().Msgf("error: cannot find repo with id %d: %s", build.RepoID, err) return err } if proc, err = shared.UpdateProcStatus(s.store, *proc, state, build.Started); err != nil { - log.Printf("error: rpc.update: cannot update proc: %s", err) + log.Error().Msgf("error: rpc.update: cannot update proc: %s", err) } build.Procs, _ = 
s.store.ProcList(build) @@ -169,19 +166,19 @@ func (s *RPC) Upload(c context.Context, id string, file *rpc.File) error { pproc, err := s.store.ProcLoad(procID) if err != nil { - log.Printf("error: cannot find parent proc with id %d: %s", procID, err) + log.Error().Msgf("error: cannot find parent proc with id %d: %s", procID, err) return err } build, err := s.store.GetBuild(pproc.BuildID) if err != nil { - log.Printf("error: cannot find build with id %d: %s", pproc.BuildID, err) + log.Error().Msgf("error: cannot find build with id %d: %s", pproc.BuildID, err) return err } proc, err := s.store.ProcChild(build, pproc.PID, file.Proc) if err != nil { - log.Printf("error: cannot find child proc with name %s: %s", file.Proc, err) + log.Error().Msgf("error: cannot find child proc with name %s: %s", file.Proc, err) return err } @@ -242,7 +239,7 @@ func (s *RPC) Init(c context.Context, id string, state rpc.State) error { proc, err := s.store.ProcLoad(procID) if err != nil { - log.Printf("error: cannot find proc with id %d: %s", procID, err) + log.Error().Msgf("error: cannot find proc with id %d: %s", procID, err) return err } metadata, ok := grpcMetadata.FromIncomingContext(c) @@ -255,19 +252,19 @@ func (s *RPC) Init(c context.Context, id string, state rpc.State) error { build, err := s.store.GetBuild(proc.BuildID) if err != nil { - log.Printf("error: cannot find build with id %d: %s", proc.BuildID, err) + log.Error().Msgf("error: cannot find build with id %d: %s", proc.BuildID, err) return err } repo, err := s.store.GetRepo(build.RepoID) if err != nil { - log.Printf("error: cannot find repo with id %d: %s", build.RepoID, err) + log.Error().Msgf("error: cannot find repo with id %d: %s", build.RepoID, err) return err } if build.Status == model.StatusPending { if build, err = shared.UpdateToStatusRunning(s.store, *build, state.Started); err != nil { - log.Printf("error: init: cannot update build_id %d state: %s", build.ID, err) + log.Error().Msgf("error: init: cannot update build_id %d state: %s", build.ID, err) } } @@ -299,24 +296,24 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error { proc, err := s.store.ProcLoad(procID) if err != nil { - log.Printf("error: cannot find proc with id %d: %s", procID, err) + log.Error().Msgf("error: cannot find proc with id %d: %s", procID, err) return err } build, err := s.store.GetBuild(proc.BuildID) if err != nil { - log.Printf("error: cannot find build with id %d: %s", proc.BuildID, err) + log.Error().Msgf("error: cannot find build with id %d: %s", proc.BuildID, err) return err } repo, err := s.store.GetRepo(build.RepoID) if err != nil { - log.Printf("error: cannot find repo with id %d: %s", build.RepoID, err) + log.Error().Msgf("error: cannot find repo with id %d: %s", build.RepoID, err) return err } if proc, err = shared.UpdateProcStatusToDone(s.store, *proc, state); err != nil { - log.Printf("error: done: cannot update proc_id %d state: %s", proc.ID, err) + log.Error().Msgf("error: done: cannot update proc_id %d state: %s", proc.ID, err) } var queueErr error @@ -326,7 +323,7 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error { queueErr = s.queue.Done(c, id, proc.State) } if queueErr != nil { - log.Printf("error: done: cannot ack proc_id %d: %s", procID, err) + log.Error().Msgf("error: done: cannot ack proc_id %d: %s", procID, err) } procs, _ := s.store.ProcList(build) @@ -334,7 +331,7 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error { if !isThereRunningStage(procs) { if build, err = 
shared.UpdateStatusToDone(s.store, *build, buildStatus(procs), proc.Stopped); err != nil { - log.Printf("error: done: cannot update build_id %d final state: %s", build.ID, err) + log.Error().Msgf("error: done: cannot update build_id %d final state: %s", build.ID, err) } if !isMultiPipeline(procs) { @@ -347,7 +344,7 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error { } if err := s.logger.Close(c, id); err != nil { - log.Printf("error: done: cannot close build_id %d logger: %s", proc.ID, err) + log.Error().Msgf("error: done: cannot close build_id %d logger: %s", proc.ID, err) } s.notify(c, repo, build, procs) @@ -385,7 +382,7 @@ func (s *RPC) completeChildrenIfParentCompleted(procs []*model.Proc, completedPr for _, p := range procs { if p.Running() && p.PPID == completedProc.PID { if _, err := shared.UpdateProcToStatusSkipped(s.store, *p, completedProc.Stopped); err != nil { - log.Printf("error: done: cannot update proc_id %d child state: %s", p.ID, err) + log.Error().Msgf("error: done: cannot update proc_id %d child state: %s", p.ID, err) } } } @@ -428,7 +425,7 @@ func (s *RPC) updateRemoteStatus(ctx context.Context, repo *model.Repo, build *m uri := fmt.Sprintf("%s/%s/%d", server.Config.Server.Host, repo.FullName, build.Number) err = s.remote.Status(ctx, user, repo, build, uri, proc) if err != nil { - logrus.Errorf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) + log.Error().Msgf("error setting commit status for %s/%d: %v", repo.FullName, build.Number, err) } } } diff --git a/server/model/queue.go b/server/model/queue.go index ea81333b8..33361cacf 100644 --- a/server/model/queue.go +++ b/server/model/queue.go @@ -17,7 +17,8 @@ package model import ( "context" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server/queue" ) @@ -101,11 +102,11 @@ func (q *persistentQueue) PushAtOnce(c context.Context, tasks []*queue.Task) err func (q *persistentQueue) Poll(c context.Context, f queue.Filter) (*queue.Task, error) { task, err := q.Queue.Poll(c, f) if task != nil { - logrus.Debugf("pull queue item: %s: remove from backup", task.ID) + log.Debug().Msgf("pull queue item: %s: remove from backup", task.ID) if derr := q.store.TaskDelete(task.ID); derr != nil { - logrus.Errorf("pull queue item: %s: failed to remove from backup: %s", task.ID, derr) + log.Error().Msgf("pull queue item: %s: failed to remove from backup: %s", task.ID, derr) } else { - logrus.Debugf("pull queue item: %s: successfully removed from backup", task.ID) + log.Debug().Msgf("pull queue item: %s: successfully removed from backup", task.ID) } } return task, err diff --git a/server/plugins/environments/filesystem.go b/server/plugins/environments/filesystem.go index 173da1e75..449e66420 100644 --- a/server/plugins/environments/filesystem.go +++ b/server/plugins/environments/filesystem.go @@ -1,8 +1,9 @@ package environments import ( - "github.com/woodpecker-ci/woodpecker/server/model" "strings" + + "github.com/woodpecker-ci/woodpecker/server/model" ) type builtin struct { diff --git a/server/plugins/registry/filesystem.go b/server/plugins/registry/filesystem.go index 83964dae2..6f32be55a 100644 --- a/server/plugins/registry/filesystem.go +++ b/server/plugins/registry/filesystem.go @@ -4,11 +4,13 @@ import ( "encoding/base64" "encoding/json" "fmt" - "github.com/docker/cli/cli/config/configfile" - "github.com/docker/cli/cli/config/types" - "github.com/woodpecker-ci/woodpecker/server/model" "os" "strings" + + 
"github.com/docker/cli/cli/config/configfile" + "github.com/docker/cli/cli/config/types" + + "github.com/woodpecker-ci/woodpecker/server/model" ) type filesystem struct { diff --git a/server/queue/fifo.go b/server/queue/fifo.go index 5bf82f172..dae761df0 100644 --- a/server/queue/fifo.go +++ b/server/queue/fifo.go @@ -3,12 +3,11 @@ package queue import ( "container/list" "context" - "log" "runtime" "sync" "time" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" ) const ( @@ -237,7 +236,7 @@ func (q *fifo) process() { const size = 64 << 10 buf := make([]byte, size) buf = buf[:runtime.Stack(buf, false)] - log.Printf("queue: unexpected panic: %v\n%s", err, buf) + log.Error().Msgf("queue: unexpected panic: %v\n%s", err, buf) } }() @@ -273,7 +272,7 @@ func (q *fifo) filterWaiting() { nextPending = e.Next() task := e.Value.(*Task) if q.depsInQueue(task) { - logrus.Debugf("queue: waiting due to unmet dependencies %v", task.ID) + log.Debug().Msgf("queue: waiting due to unmet dependencies %v", task.ID) q.waitingOnDeps.PushBack(task) filtered = append(filtered, e) } @@ -290,11 +289,11 @@ func (q *fifo) assignToWorker() (*list.Element, *worker) { for e := q.pending.Front(); e != nil; e = next { next = e.Next() task := e.Value.(*Task) - logrus.Debugf("queue: trying to assign task: %v with deps %v", task.ID, task.Dependencies) + log.Debug().Msgf("queue: trying to assign task: %v with deps %v", task.ID, task.Dependencies) for w := range q.workers { if w.filter(task) { - logrus.Debugf("queue: assigned task: %v with deps %v", task.ID, task.Dependencies) + log.Debug().Msgf("queue: assigned task: %v with deps %v", task.ID, task.Dependencies) return e, w } } @@ -318,7 +317,7 @@ func (q *fifo) depsInQueue(task *Task) bool { for e := q.pending.Front(); e != nil; e = next { next = e.Next() possibleDep, ok := e.Value.(*Task) - logrus.Debugf("queue: pending right now: %v", possibleDep.ID) + log.Debug().Msgf("queue: pending right now: %v", possibleDep.ID) for _, dep := range task.Dependencies { if ok && possibleDep.ID == dep { return true @@ -326,7 +325,7 @@ func (q *fifo) depsInQueue(task *Task) bool { } } for possibleDepID := range q.running { - logrus.Debugf("queue: running right now: %v", possibleDepID) + log.Debug().Msgf("queue: running right now: %v", possibleDepID) for _, dep := range task.Dependencies { if possibleDepID == dep { return true @@ -369,13 +368,13 @@ func (q *fifo) updateDepStatusInQueue(taskID string, status string) { } func (q *fifo) removeFromPending(taskID string) { - logrus.Debugf("queue: trying to remove %s", taskID) + log.Debug().Msgf("queue: trying to remove %s", taskID) var next *list.Element for e := q.pending.Front(); e != nil; e = next { next = e.Next() task := e.Value.(*Task) if task.ID == taskID { - logrus.Debugf("queue: %s is removed from pending", taskID) + log.Debug().Msgf("queue: %s is removed from pending", taskID) q.pending.Remove(e) return } diff --git a/server/queue/fifo_test.go b/server/queue/fifo_test.go index 151d95e6d..aa65f1436 100644 --- a/server/queue/fifo_test.go +++ b/server/queue/fifo_test.go @@ -246,7 +246,6 @@ func TestFifoErrors2(t *testing.T) { } func TestFifoErrorsMultiThread(t *testing.T) { - //logrus.SetLevel(logrus.DebugLevel) task1 := &Task{ ID: "1", } diff --git a/server/remote/bitbucket/bitbucket.go b/server/remote/bitbucket/bitbucket.go index 07bacbe19..d071843b6 100644 --- a/server/remote/bitbucket/bitbucket.go +++ b/server/remote/bitbucket/bitbucket.go @@ -20,12 +20,12 @@ import ( "net/http" "net/url" + "golang.org/x/oauth2" + 
"github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/internal" - - "golang.org/x/oauth2" ) // Bitbucket cloud endpoints. diff --git a/server/remote/bitbucket/bitbucket_test.go b/server/remote/bitbucket/bitbucket_test.go index e642afc20..5273c3bd2 100644 --- a/server/remote/bitbucket/bitbucket_test.go +++ b/server/remote/bitbucket/bitbucket_test.go @@ -21,12 +21,12 @@ import ( "net/http/httptest" "testing" + "github.com/franela/goblin" + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/fixtures" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/internal" - - "github.com/franela/goblin" - "github.com/gin-gonic/gin" ) func Test_bitbucket(t *testing.T) { diff --git a/server/remote/bitbucket/convert.go b/server/remote/bitbucket/convert.go index 50139ebd7..98f1e67ee 100644 --- a/server/remote/bitbucket/convert.go +++ b/server/remote/bitbucket/convert.go @@ -20,10 +20,10 @@ import ( "regexp" "strings" + "golang.org/x/oauth2" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/internal" - - "golang.org/x/oauth2" ) const ( diff --git a/server/remote/bitbucket/convert_test.go b/server/remote/bitbucket/convert_test.go index 1fcbb7b13..53ff07dc9 100644 --- a/server/remote/bitbucket/convert_test.go +++ b/server/remote/bitbucket/convert_test.go @@ -18,11 +18,11 @@ import ( "testing" "time" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/internal" - "github.com/franela/goblin" "golang.org/x/oauth2" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/internal" ) func Test_helper(t *testing.T) { diff --git a/server/remote/bitbucket/parse_test.go b/server/remote/bitbucket/parse_test.go index 7fd928d38..60278e82f 100644 --- a/server/remote/bitbucket/parse_test.go +++ b/server/remote/bitbucket/parse_test.go @@ -19,9 +19,9 @@ import ( "net/http" "testing" - "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/fixtures" - "github.com/franela/goblin" + + "github.com/woodpecker-ci/woodpecker/server/remote/bitbucket/fixtures" ) func Test_parser(t *testing.T) { diff --git a/server/remote/bitbucketserver/bitbucketserver.go b/server/remote/bitbucketserver/bitbucketserver.go index 0d904104d..be0691f41 100644 --- a/server/remote/bitbucketserver/bitbucketserver.go +++ b/server/remote/bitbucketserver/bitbucketserver.go @@ -30,6 +30,7 @@ import ( "strings" "github.com/mrjones/oauth" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucketserver/internal" diff --git a/server/remote/bitbucketserver/convert.go b/server/remote/bitbucketserver/convert.go index 6a238d5fb..7ce609acb 100644 --- a/server/remote/bitbucketserver/convert.go +++ b/server/remote/bitbucketserver/convert.go @@ -23,6 +23,7 @@ import ( "time" "github.com/mrjones/oauth" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucketserver/internal" ) diff --git a/server/remote/bitbucketserver/convert_test.go b/server/remote/bitbucketserver/convert_test.go index 4249a78e2..be50adbdb 100644 --- a/server/remote/bitbucketserver/convert_test.go +++ 
b/server/remote/bitbucketserver/convert_test.go @@ -19,6 +19,7 @@ import ( "github.com/franela/goblin" "github.com/mrjones/oauth" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucketserver/internal" ) diff --git a/server/remote/bitbucketserver/internal/client.go b/server/remote/bitbucketserver/internal/client.go index 8746e9eaa..568537af6 100644 --- a/server/remote/bitbucketserver/internal/client.go +++ b/server/remote/bitbucketserver/internal/client.go @@ -23,11 +23,11 @@ import ( "io/ioutil" "net/http" "strconv" - "strings" "github.com/mrjones/oauth" - log "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server/model" ) @@ -57,7 +57,7 @@ func NewClientWithToken(ctx context.Context, url string, consumer *oauth.Consume token.Token = AccessToken client, err := consumer.MakeHttpClient(&token) if err != nil { - log.Error(err) + log.Err(err).Msg("") } return &Client{ @@ -113,7 +113,7 @@ func (c *Client) FindRepo(owner string, name string) (*Repo, error) { defer response.Body.Close() } if err != nil { - log.Error(err) + log.Err(err).Msg("") } contents, err := ioutil.ReadAll(response.Body) repo := Repo{} @@ -154,14 +154,14 @@ func (c *Client) FindFileForRepo(owner string, repo string, fileName string, ref defer response.Body.Close() } if err != nil { - log.Error(err) + log.Err(err).Msg("") } if response.StatusCode == 404 { return nil, nil } responseBytes, err := ioutil.ReadAll(response.Body) if err != nil { - log.Error(err) + log.Err(err).Msg("") } return responseBytes, nil } diff --git a/server/remote/bitbucketserver/parse.go b/server/remote/bitbucketserver/parse.go index 852910a5a..6f92c43f3 100644 --- a/server/remote/bitbucketserver/parse.go +++ b/server/remote/bitbucketserver/parse.go @@ -17,9 +17,10 @@ package bitbucketserver import ( "encoding/json" "fmt" + "net/http" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/bitbucketserver/internal" - "net/http" ) // parseHook parses a Bitbucket hook from an http.Request request and returns diff --git a/server/remote/coding/coding.go b/server/remote/coding/coding.go index 559cc7d11..19c143b81 100644 --- a/server/remote/coding/coding.go +++ b/server/remote/coding/coding.go @@ -21,12 +21,12 @@ import ( "net/http" "strings" + "golang.org/x/oauth2" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/remote/coding/internal" - - "golang.org/x/oauth2" ) const ( diff --git a/server/remote/coding/coding_test.go b/server/remote/coding/coding_test.go index 19c0ecb83..16df47786 100644 --- a/server/remote/coding/coding_test.go +++ b/server/remote/coding/coding_test.go @@ -21,11 +21,11 @@ import ( "net/http/httptest" "testing" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/coding/fixtures" - "github.com/franela/goblin" "github.com/gin-gonic/gin" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/coding/fixtures" ) func Test_coding(t *testing.T) { diff --git a/server/remote/coding/hook_test.go b/server/remote/coding/hook_test.go index ce540e3c9..fb80c0c71 100644 --- a/server/remote/coding/hook_test.go +++ b/server/remote/coding/hook_test.go @@ -20,10 +20,10 @@ import ( "strings" "testing" + "github.com/franela/goblin" + 
"github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/coding/fixtures" - - "github.com/franela/goblin" ) func Test_hook(t *testing.T) { diff --git a/server/remote/gitea/gitea.go b/server/remote/gitea/gitea.go index a5cdf5408..31b81728a 100644 --- a/server/remote/gitea/gitea.go +++ b/server/remote/gitea/gitea.go @@ -28,11 +28,11 @@ import ( "path/filepath" "code.gitea.io/sdk/gitea" + "golang.org/x/oauth2" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" - - "golang.org/x/oauth2" ) const ( diff --git a/server/remote/gitea/gitea_test.go b/server/remote/gitea/gitea_test.go index 47e111bf0..9c757af02 100644 --- a/server/remote/gitea/gitea_test.go +++ b/server/remote/gitea/gitea_test.go @@ -21,6 +21,7 @@ import ( "github.com/franela/goblin" "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/gitea/fixtures" ) diff --git a/server/remote/gitea/helper.go b/server/remote/gitea/helper.go index d43b69e29..6f8b6c57b 100644 --- a/server/remote/gitea/helper.go +++ b/server/remote/gitea/helper.go @@ -23,6 +23,7 @@ import ( "time" "code.gitea.io/sdk/gitea" + "github.com/woodpecker-ci/woodpecker/server/model" ) diff --git a/server/remote/gitea/helper_test.go b/server/remote/gitea/helper_test.go index 4e67901cb..621714648 100644 --- a/server/remote/gitea/helper_test.go +++ b/server/remote/gitea/helper_test.go @@ -20,6 +20,7 @@ import ( "code.gitea.io/sdk/gitea" "github.com/franela/goblin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/gitea/fixtures" ) diff --git a/server/remote/gitea/parse_test.go b/server/remote/gitea/parse_test.go index 049b28bf5..056a5eda0 100644 --- a/server/remote/gitea/parse_test.go +++ b/server/remote/gitea/parse_test.go @@ -20,6 +20,7 @@ import ( "testing" "github.com/franela/goblin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/gitea/fixtures" ) diff --git a/server/remote/github/convert.go b/server/remote/github/convert.go index aa0cd17b8..ebcb486ca 100644 --- a/server/remote/github/convert.go +++ b/server/remote/github/convert.go @@ -18,9 +18,9 @@ import ( "fmt" "strings" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/google/go-github/v39/github" + + "github.com/woodpecker-ci/woodpecker/server/model" ) const defaultBranch = "master" diff --git a/server/remote/github/convert_test.go b/server/remote/github/convert_test.go index 2a53fc487..f98cb1e9a 100644 --- a/server/remote/github/convert_test.go +++ b/server/remote/github/convert_test.go @@ -17,10 +17,10 @@ package github import ( "testing" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/franela/goblin" "github.com/google/go-github/v39/github" + + "github.com/woodpecker-ci/woodpecker/server/model" ) func Test_helper(t *testing.T) { diff --git a/server/remote/github/github.go b/server/remote/github/github.go index 47b05b4e5..25ed77900 100644 --- a/server/remote/github/github.go +++ b/server/remote/github/github.go @@ -25,12 +25,12 @@ import ( "strconv" "strings" + "github.com/google/go-github/v39/github" + "golang.org/x/oauth2" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" - - "github.com/google/go-github/v39/github" - "golang.org/x/oauth2" ) const ( diff 
--git a/server/remote/github/github_test.go b/server/remote/github/github_test.go index 6ea3ee982..d24a1cf86 100644 --- a/server/remote/github/github_test.go +++ b/server/remote/github/github_test.go @@ -19,11 +19,11 @@ import ( "net/http/httptest" "testing" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/github/fixtures" - "github.com/franela/goblin" "github.com/gin-gonic/gin" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/github/fixtures" ) func Test_github(t *testing.T) { diff --git a/server/remote/github/parse_test.go b/server/remote/github/parse_test.go index ab0077498..6bc243869 100644 --- a/server/remote/github/parse_test.go +++ b/server/remote/github/parse_test.go @@ -20,6 +20,7 @@ import ( "testing" "github.com/franela/goblin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote/github/fixtures" ) diff --git a/server/remote/gitlab/convert.go b/server/remote/gitlab/convert.go index 17f09877b..8ec7b70c5 100644 --- a/server/remote/gitlab/convert.go +++ b/server/remote/gitlab/convert.go @@ -21,9 +21,9 @@ import ( "net/http" "strings" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/xanzy/go-gitlab" + + "github.com/woodpecker-ci/woodpecker/server/model" ) func (g *Gitlab) convertGitlabRepo(repo_ *gitlab.Project) (*model.Repo, error) { diff --git a/server/remote/gitlab/gitlab.go b/server/remote/gitlab/gitlab.go index 77dc8dc9d..109f23f66 100644 --- a/server/remote/gitlab/gitlab.go +++ b/server/remote/gitlab/gitlab.go @@ -25,12 +25,12 @@ import ( "net/url" "strings" + "github.com/xanzy/go-gitlab" + "github.com/woodpecker-ci/woodpecker/server" "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/shared/oauth2" - - "github.com/xanzy/go-gitlab" ) const ( diff --git a/server/remote/gitlab/gitlab_test.go b/server/remote/gitlab/gitlab_test.go index 9ec441ae0..6a4247dcc 100644 --- a/server/remote/gitlab/gitlab_test.go +++ b/server/remote/gitlab/gitlab_test.go @@ -22,11 +22,11 @@ import ( "strconv" "testing" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/gitlab/testdata" - "github.com/franela/goblin" "github.com/stretchr/testify/assert" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/gitlab/testdata" ) func load(config string) *Gitlab { diff --git a/server/remote/gitlab/status.go b/server/remote/gitlab/status.go index 1ae87e7ec..e9355e0a1 100644 --- a/server/remote/gitlab/status.go +++ b/server/remote/gitlab/status.go @@ -15,9 +15,9 @@ package gitlab import ( - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/xanzy/go-gitlab" + + "github.com/woodpecker-ci/woodpecker/server/model" ) const ( diff --git a/server/remote/gogs/gogs.go b/server/remote/gogs/gogs.go index a4e6d74ce..1df26de62 100644 --- a/server/remote/gogs/gogs.go +++ b/server/remote/gogs/gogs.go @@ -24,6 +24,7 @@ import ( "strings" "github.com/gogits/go-gogs-client" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" ) diff --git a/server/remote/gogs/gogs_test.go b/server/remote/gogs/gogs_test.go index bd029a4b2..2c5c9d60c 100644 --- a/server/remote/gogs/gogs_test.go +++ b/server/remote/gogs/gogs_test.go @@ -19,11 +19,11 @@ import ( "net/http/httptest" "testing" - 
"github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/gogs/fixtures" - "github.com/franela/goblin" "github.com/gin-gonic/gin" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/gogs/fixtures" ) func Test_gogs(t *testing.T) { diff --git a/server/remote/gogs/helper.go b/server/remote/gogs/helper.go index e06d550c4..a62dc1348 100644 --- a/server/remote/gogs/helper.go +++ b/server/remote/gogs/helper.go @@ -23,6 +23,7 @@ import ( "time" "github.com/gogits/go-gogs-client" + "github.com/woodpecker-ci/woodpecker/server/model" ) diff --git a/server/remote/gogs/helper_test.go b/server/remote/gogs/helper_test.go index fab96bba6..e0d09d803 100644 --- a/server/remote/gogs/helper_test.go +++ b/server/remote/gogs/helper_test.go @@ -18,11 +18,11 @@ import ( "bytes" "testing" - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/woodpecker-ci/woodpecker/server/remote/gogs/fixtures" - "github.com/franela/goblin" "github.com/gogits/go-gogs-client" + + "github.com/woodpecker-ci/woodpecker/server/model" + "github.com/woodpecker-ci/woodpecker/server/remote/gogs/fixtures" ) func Test_parse(t *testing.T) { diff --git a/server/remote/mocks/remote.go b/server/remote/mocks/remote.go index feac2d7d7..4329443b7 100644 --- a/server/remote/mocks/remote.go +++ b/server/remote/mocks/remote.go @@ -9,7 +9,6 @@ import ( mock "github.com/stretchr/testify/mock" model "github.com/woodpecker-ci/woodpecker/server/model" - remote "github.com/woodpecker-ci/woodpecker/server/remote" ) diff --git a/server/router/middleware/config.go b/server/router/middleware/config.go index 631304711..4888050c9 100644 --- a/server/router/middleware/config.go +++ b/server/router/middleware/config.go @@ -15,10 +15,10 @@ package middleware import ( - "github.com/woodpecker-ci/woodpecker/server/model" - "github.com/gin-gonic/gin" "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/server/model" ) const configKey = "config" diff --git a/vendor/github.com/gin-gonic/contrib/ginrus/ginrus.go b/server/router/middleware/logger/logger.go similarity index 53% rename from vendor/github.com/gin-gonic/contrib/ginrus/ginrus.go rename to server/router/middleware/logger/logger.go index fecd1f717..13503f966 100644 --- a/vendor/github.com/gin-gonic/contrib/ginrus/ginrus.go +++ b/server/router/middleware/logger/logger.go @@ -1,28 +1,21 @@ -// Package ginrus provides log handling using logrus package. -// -// Based on github.com/stephenmuss/ginerus but adds more options. -package ginrus +package logger import ( "time" "github.com/gin-gonic/gin" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" ) -type loggerEntryWithFields interface { - WithFields(fields logrus.Fields) *logrus.Entry -} - -// Ginrus returns a gin.HandlerFunc (middleware) that logs requests using logrus. +// Logger returns a gin.HandlerFunc (middleware) that logs requests using zerolog. // -// Requests with errors are logged using logrus.Error(). -// Requests without errors are logged using logrus.Info(). +// Requests with errors are logged using log.Err(). +// Requests without errors are logged using log.Info(). // // It receives: // 1. A time package format string (e.g. time.RFC3339). // 2. A boolean stating whether to use UTC time zone or local. 
-func Ginrus(logger loggerEntryWithFields, timeFormat string, utc bool) gin.HandlerFunc { +func Logger(timeFormat string, utc bool) gin.HandlerFunc { return func(c *gin.Context) { start := time.Now() // some evil middlewares modify this values @@ -35,7 +28,7 @@ func Ginrus(logger loggerEntryWithFields, timeFormat string, utc bool) gin.Handl end = end.UTC() } - entry := logger.WithFields(logrus.Fields{ + entry := map[string]interface{}{ "status": c.Writer.Status(), "method": c.Request.Method, "path": path, @@ -43,13 +36,13 @@ func Ginrus(logger loggerEntryWithFields, timeFormat string, utc bool) gin.Handl "latency": latency, "user-agent": c.Request.UserAgent(), "time": end.Format(timeFormat), - }) + } if len(c.Errors) > 0 { // Append error field if this is an erroneous request. - entry.Error(c.Errors.String()) + log.Error().Str("error", c.Errors.String()).Fields(entry).Msg("") } else { - entry.Info() + log.Info().Fields(entry).Msg("") } } } diff --git a/server/router/middleware/remote.go b/server/router/middleware/remote.go index 1c92ee19f..c04ca6cef 100644 --- a/server/router/middleware/remote.go +++ b/server/router/middleware/remote.go @@ -16,6 +16,7 @@ package middleware import ( "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/remote" ) diff --git a/server/router/middleware/session/agent.go b/server/router/middleware/session/agent.go index da9a814d5..d679676f8 100644 --- a/server/router/middleware/session/agent.go +++ b/server/router/middleware/session/agent.go @@ -16,6 +16,7 @@ package session import ( "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/shared/token" ) diff --git a/server/router/middleware/session/repo.go b/server/router/middleware/session/repo.go index 0f525e06d..5c2e346c1 100644 --- a/server/router/middleware/session/repo.go +++ b/server/router/middleware/session/repo.go @@ -18,12 +18,13 @@ import ( "net/http" "time" + "github.com/rs/zerolog/log" + + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/store" - - "github.com/gin-gonic/gin" - log "github.com/sirupsen/logrus" ) func Repo(c *gin.Context) *model.Repo { @@ -54,7 +55,7 @@ func SetRepo() gin.HandlerFunc { } // debugging - log.Debugf("Cannot find repository %s/%s. %s", + log.Debug().Msgf("Cannot find repository %s/%s. %s", owner, name, err.Error(), @@ -91,13 +92,13 @@ func SetPerm() gin.HandlerFunc { var err error perm, err = store.FromContext(c).PermFind(user, repo) if err != nil { - log.Errorf("Error fetching permission for %s %s. %s", + log.Error().Msgf("Error fetching permission for %s %s. 
%s", user.Login, repo.FullName, err) } if time.Unix(perm.Synced, 0).Add(time.Hour).Before(time.Now()) { perm, err = remote.FromContext(c).Perm(c, user, repo.Owner, repo.Name) if err == nil { - log.Debugf("Synced user permission for %s %s", user.Login, repo.FullName) + log.Debug().Msgf("Synced user permission for %s %s", user.Login, repo.FullName) perm.Repo = repo.FullName perm.UserID = user.ID perm.Synced = time.Now().Unix() @@ -124,11 +125,11 @@ func SetPerm() gin.HandlerFunc { } if user != nil { - log.Debugf("%s granted %+v permission to %s", + log.Debug().Msgf("%s granted %+v permission to %s", user.Login, perm, repo.FullName) } else { - log.Debugf("Guest granted %+v to %s", perm, repo.FullName) + log.Debug().Msgf("Guest granted %+v to %s", perm, repo.FullName) } c.Set("perm", perm) @@ -148,11 +149,11 @@ func MustPull(c *gin.Context) { // debugging if user != nil { c.AbortWithStatus(http.StatusNotFound) - log.Debugf("User %s denied read access to %s", + log.Debug().Msgf("User %s denied read access to %s", user.Login, c.Request.URL.Path) } else { c.AbortWithStatus(http.StatusUnauthorized) - log.Debugf("Guest denied read access to %s %s", + log.Debug().Msgf("Guest denied read access to %s %s", c.Request.Method, c.Request.URL.Path, ) @@ -173,12 +174,12 @@ func MustPush(c *gin.Context) { // debugging if user != nil { c.AbortWithStatus(http.StatusNotFound) - log.Debugf("User %s denied write access to %s", + log.Debug().Msgf("User %s denied write access to %s", user.Login, c.Request.URL.Path) } else { c.AbortWithStatus(http.StatusUnauthorized) - log.Debugf("Guest denied write access to %s %s", + log.Debug().Msgf("Guest denied write access to %s %s", c.Request.Method, c.Request.URL.Path, ) diff --git a/server/router/middleware/session/user.go b/server/router/middleware/session/user.go index 6103c87b0..b9f9d8f97 100644 --- a/server/router/middleware/session/user.go +++ b/server/router/middleware/session/user.go @@ -17,11 +17,11 @@ package session import ( "net/http" + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/shared/token" - - "github.com/gin-gonic/gin" ) func User(c *gin.Context) *model.User { diff --git a/server/router/middleware/store.go b/server/router/middleware/store.go index 6c38281ba..e46f8aef7 100644 --- a/server/router/middleware/store.go +++ b/server/router/middleware/store.go @@ -15,10 +15,10 @@ package middleware import ( - "github.com/urfave/cli" - "github.com/woodpecker-ci/woodpecker/server/store" - "github.com/gin-gonic/gin" + "github.com/urfave/cli" + + "github.com/woodpecker-ci/woodpecker/server/store" ) // Store is a middleware function that initializes the Datastore and attaches to diff --git a/server/router/middleware/token/token.go b/server/router/middleware/token/token.go index 371c35823..c13391561 100644 --- a/server/router/middleware/token/token.go +++ b/server/router/middleware/token/token.go @@ -17,12 +17,13 @@ package token import ( "time" + "github.com/rs/zerolog/log" + + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/router/middleware/session" "github.com/woodpecker-ci/woodpecker/server/store" - - "github.com/gin-gonic/gin" - log "github.com/sirupsen/logrus" ) func Refresh(c *gin.Context) { @@ -58,9 +59,9 @@ func Refresh(c *gin.Context) { if err != nil { // we only log the error at this time. not sure // if we really want to fail the request, do we? 
- log.Errorf("cannot refresh access token for %s. %s", user.Login, err) + log.Error().Msgf("cannot refresh access token for %s. %s", user.Login, err) } else { - log.Infof("refreshed access token for %s", user.Login) + log.Info().Msgf("refreshed access token for %s", user.Login) } } diff --git a/server/router/middleware/version.go b/server/router/middleware/version.go index c68b61650..ebd85a577 100644 --- a/server/router/middleware/version.go +++ b/server/router/middleware/version.go @@ -16,6 +16,7 @@ package middleware import ( "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/version" ) diff --git a/server/router/router.go b/server/router/router.go index 559d2d408..c9737599d 100644 --- a/server/router/router.go +++ b/server/router/router.go @@ -17,6 +17,9 @@ package router import ( "net/http" + "github.com/gin-gonic/gin" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server/api" "github.com/woodpecker-ci/woodpecker/server/api/debug" "github.com/woodpecker-ci/woodpecker/server/api/metrics" @@ -24,9 +27,6 @@ import ( "github.com/woodpecker-ci/woodpecker/server/router/middleware/session" "github.com/woodpecker-ci/woodpecker/server/router/middleware/token" "github.com/woodpecker-ci/woodpecker/server/web" - - "github.com/gin-gonic/gin" - "github.com/sirupsen/logrus" ) // Load loads the router @@ -36,7 +36,7 @@ func Load(serveHTTP func(w http.ResponseWriter, r *http.Request), middleware ... e.Use(gin.Recovery()) e.Use(func(c *gin.Context) { - logrus.Tracef("[%s] %s", c.Request.Method, c.Request.URL.String()) + log.Trace().Msgf("[%s] %s", c.Request.Method, c.Request.URL.String()) c.Next() }) diff --git a/server/shared/configFetcher.go b/server/shared/configFetcher.go index be7694c34..d42b91f64 100644 --- a/server/shared/configFetcher.go +++ b/server/shared/configFetcher.go @@ -7,10 +7,10 @@ import ( "strings" "time" + "github.com/rs/zerolog/log" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" - - "github.com/sirupsen/logrus" ) type configFetcher struct { @@ -31,7 +31,7 @@ func NewConfigFetcher(remote remote.Remote, user *model.User, repo *model.Repo, // Fetch pipeline config from source forge func (cf *configFetcher) Fetch(ctx context.Context) (files []*remote.FileMeta, err error) { - logrus.Tracef("Start Fetching config for '%s'", cf.repo.FullName) + log.Trace().Msgf("Start Fetching config for '%s'", cf.repo.FullName) // try to fetch 3 times, timeout is one second longer each time for i := 0; i < 3; i++ { @@ -51,12 +51,12 @@ func (cf *configFetcher) fetch(c context.Context, timeout time.Duration, config defer cancel() if len(config) > 0 { - logrus.Tracef("ConfigFetch[%s]: use user config '%s'", cf.repo.FullName, config) + log.Trace().Msgf("ConfigFetch[%s]: use user config '%s'", cf.repo.FullName, config) // either a file if !strings.HasSuffix(config, "/") { file, err := cf.remote_.File(ctx, cf.user, cf.repo, cf.build, config) if err == nil && len(file) != 0 { - logrus.Tracef("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) + log.Trace().Msgf("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) return []*remote.FileMeta{{ Name: config, Data: file, @@ -67,14 +67,14 @@ func (cf *configFetcher) fetch(c context.Context, timeout time.Duration, config // or a folder files, err := cf.remote_.Dir(ctx, cf.user, cf.repo, cf.build, strings.TrimSuffix(config, "/")) if err == nil && len(files) != 0 { - logrus.Tracef("ConfigFetch[%s]: found %d files in '%s'", cf.repo.FullName, len(files), 
config) + log.Trace().Msgf("ConfigFetch[%s]: found %d files in '%s'", cf.repo.FullName, len(files), config) return filterPipelineFiles(files), nil } return nil, fmt.Errorf("config '%s' not found: %s", config, err) } - logrus.Tracef("ConfigFetch[%s]: user did not defined own config follow default procedure", cf.repo.FullName) + log.Trace().Msgf("ConfigFetch[%s]: user did not defined own config follow default procedure", cf.repo.FullName) // no user defined config so try .woodpecker/*.yml -> .woodpecker.yml -> .drone.yml // test .woodpecker/ folder @@ -83,14 +83,14 @@ func (cf *configFetcher) fetch(c context.Context, timeout time.Duration, config files, err := cf.remote_.Dir(ctx, cf.user, cf.repo, cf.build, config) files = filterPipelineFiles(files) if err == nil && len(files) != 0 { - logrus.Tracef("ConfigFetch[%s]: found %d files in '%s'", cf.repo.FullName, len(files), config) + log.Trace().Msgf("ConfigFetch[%s]: found %d files in '%s'", cf.repo.FullName, len(files), config) return files, nil } config = ".woodpecker.yml" file, err := cf.remote_.File(ctx, cf.user, cf.repo, cf.build, config) if err == nil && len(file) != 0 { - logrus.Tracef("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) + log.Trace().Msgf("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) return []*remote.FileMeta{{ Name: config, Data: file, @@ -100,7 +100,7 @@ func (cf *configFetcher) fetch(c context.Context, timeout time.Duration, config config = ".drone.yml" file, err = cf.remote_.File(ctx, cf.user, cf.repo, cf.build, config) if err == nil && len(file) != 0 { - logrus.Tracef("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) + log.Trace().Msgf("ConfigFetch[%s]: found file '%s'", cf.repo.FullName, config) return []*remote.FileMeta{{ Name: config, Data: file, diff --git a/server/shared/configFetcher_test.go b/server/shared/configFetcher_test.go index 95cead727..7374393e8 100644 --- a/server/shared/configFetcher_test.go +++ b/server/shared/configFetcher_test.go @@ -6,13 +6,13 @@ import ( "path/filepath" "testing" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/mock" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/remote" "github.com/woodpecker-ci/woodpecker/server/remote/mocks" "github.com/woodpecker-ci/woodpecker/server/shared" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/mock" ) func TestFetch(t *testing.T) { diff --git a/server/shared/procBuilder.go b/server/shared/procBuilder.go index 09d1dedbd..fe4971131 100644 --- a/server/shared/procBuilder.go +++ b/server/shared/procBuilder.go @@ -23,6 +23,7 @@ import ( "strings" "github.com/drone/envsubst" + "github.com/woodpecker-ci/woodpecker/pipeline/backend" "github.com/woodpecker-ci/woodpecker/pipeline/frontend" "github.com/woodpecker-ci/woodpecker/pipeline/frontend/yaml" diff --git a/server/store/datastore/builds.go b/server/store/datastore/builds.go index 3cdbc6024..bdb223552 100644 --- a/server/store/datastore/builds.go +++ b/server/store/datastore/builds.go @@ -19,6 +19,7 @@ import ( "time" "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/builds_test.go b/server/store/datastore/builds_test.go index 3c26b7fed..9fe361e9c 100644 --- a/server/store/datastore/builds_test.go +++ b/server/store/datastore/builds_test.go @@ -19,6 +19,7 @@ import ( "testing" "github.com/franela/goblin" + 
"github.com/woodpecker-ci/woodpecker/server/model" ) diff --git a/server/store/datastore/config.go b/server/store/datastore/config.go index 32fc90e03..1a0fca4bc 100644 --- a/server/store/datastore/config.go +++ b/server/store/datastore/config.go @@ -18,6 +18,7 @@ import ( gosql "database/sql" "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/files.go b/server/store/datastore/files.go index 185ee12f5..9b29ab768 100644 --- a/server/store/datastore/files.go +++ b/server/store/datastore/files.go @@ -19,10 +19,10 @@ import ( "io" "io/ioutil" + "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" - - "github.com/russross/meddler" ) func (db *datastore) FileList(build *model.Build) ([]*model.File, error) { diff --git a/server/store/datastore/logs.go b/server/store/datastore/logs.go index af23fdcf6..49777dc38 100644 --- a/server/store/datastore/logs.go +++ b/server/store/datastore/logs.go @@ -20,6 +20,7 @@ import ( "io/ioutil" "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/perms.go b/server/store/datastore/perms.go index 45d1d42f6..2b83b6e3c 100644 --- a/server/store/datastore/perms.go +++ b/server/store/datastore/perms.go @@ -15,10 +15,10 @@ package datastore import ( + "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" - - "github.com/russross/meddler" ) func (db *datastore) PermFind(user *model.User, repo *model.Repo) (*model.Perm, error) { diff --git a/server/store/datastore/procs.go b/server/store/datastore/procs.go index 9ee3f996f..172f31556 100644 --- a/server/store/datastore/procs.go +++ b/server/store/datastore/procs.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/registry.go b/server/store/datastore/registry.go index 659699e77..06cadbd00 100644 --- a/server/store/datastore/registry.go +++ b/server/store/datastore/registry.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/repos.go b/server/store/datastore/repos.go index d6a4f9bbe..10a088d94 100644 --- a/server/store/datastore/repos.go +++ b/server/store/datastore/repos.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/repos_test.go b/server/store/datastore/repos_test.go index 3c074c23a..8cdccea99 100644 --- a/server/store/datastore/repos_test.go +++ b/server/store/datastore/repos_test.go @@ -18,6 +18,7 @@ import ( "testing" "github.com/franela/goblin" + "github.com/woodpecker-ci/woodpecker/server/model" ) diff --git a/server/store/datastore/secret.go b/server/store/datastore/secret.go index c19bfbae9..3d2bfe5fa 100644 --- a/server/store/datastore/secret.go +++ b/server/store/datastore/secret.go @@ -16,6 +16,7 @@ package datastore import 
( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/sender.go b/server/store/datastore/sender.go index 279f3b158..983c90d35 100644 --- a/server/store/datastore/sender.go +++ b/server/store/datastore/sender.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/store.go b/server/store/datastore/store.go index 87c3e8bac..6fd0c65cb 100644 --- a/server/store/datastore/store.go +++ b/server/store/datastore/store.go @@ -19,11 +19,11 @@ import ( "os" "time" + "github.com/rs/zerolog/log" "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/store" "github.com/woodpecker-ci/woodpecker/server/store/datastore/ddl" - - "github.com/sirupsen/logrus" ) // datastore is an implementation of a model.Store built on top @@ -55,8 +55,7 @@ func From(db *sql.DB) store.Store { func open(driver, config string) *sql.DB { db, err := sql.Open(driver, config) if err != nil { - logrus.Errorln(err) - logrus.Fatalln("database connection failed") + log.Fatal().Err(err).Msg("database connection failed") } if driver == "mysql" { // per issue https://github.com/go-sql-driver/mysql/issues/257 @@ -66,13 +65,11 @@ func open(driver, config string) *sql.DB { setupMeddler(driver) if err := pingDatabase(db); err != nil { - logrus.Errorln(err) - logrus.Fatalln("database ping attempts failed") + log.Fatal().Err(err).Msg("database ping attempts failed") } if err := setupDatabase(driver, db); err != nil { - logrus.Errorln(err) - logrus.Fatalln("migration failed") + log.Fatal().Err(err).Msg("migration failed") } return db } @@ -120,7 +117,7 @@ func pingDatabase(db *sql.DB) (err error) { if err == nil { return } - logrus.Infof("database ping failed. retry in 1s") + log.Info().Msgf("database ping failed. 
retry in 1s") time.Sleep(time.Second) } return diff --git a/server/store/datastore/task.go b/server/store/datastore/task.go index db8f901e6..276115450 100644 --- a/server/store/datastore/task.go +++ b/server/store/datastore/task.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/users.go b/server/store/datastore/users.go index 7a227e5fc..5cf04844b 100644 --- a/server/store/datastore/users.go +++ b/server/store/datastore/users.go @@ -16,6 +16,7 @@ package datastore import ( "github.com/russross/meddler" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/server/store/datastore/sql" ) diff --git a/server/store/datastore/users_test.go b/server/store/datastore/users_test.go index 7e860b437..6f24b4351 100644 --- a/server/store/datastore/users_test.go +++ b/server/store/datastore/users_test.go @@ -18,6 +18,7 @@ import ( "testing" "github.com/franela/goblin" + "github.com/woodpecker-ci/woodpecker/server/model" ) diff --git a/server/store/store.go b/server/store/store.go index 481ea834e..2012d7359 100644 --- a/server/store/store.go +++ b/server/store/store.go @@ -17,9 +17,9 @@ package store import ( "io" - "github.com/woodpecker-ci/woodpecker/server/model" - "golang.org/x/net/context" + + "github.com/woodpecker-ci/woodpecker/server/model" ) type Store interface { diff --git a/server/web/web.go b/server/web/web.go index 48187ba59..b47858874 100644 --- a/server/web/web.go +++ b/server/web/web.go @@ -22,12 +22,12 @@ import ( "net/http" "time" + "github.com/gin-gonic/gin" + "github.com/woodpecker-ci/woodpecker/server/model" "github.com/woodpecker-ci/woodpecker/shared/token" "github.com/woodpecker-ci/woodpecker/version" "github.com/woodpecker-ci/woodpecker/web" - - "github.com/gin-gonic/gin" ) // Endpoint provides the website endpoints. diff --git a/shared/token/token.go b/shared/token/token.go index d0245b9c5..81391aa6b 100644 --- a/shared/token/token.go +++ b/shared/token/token.go @@ -19,7 +19,7 @@ import ( "net/http" "github.com/golang-jwt/jwt/v4" - "github.com/sirupsen/logrus" + "github.com/rs/zerolog/log" ) type SecretFunc func(*Token) (string, error) @@ -57,7 +57,7 @@ func ParseRequest(r *http.Request, fn SecretFunc) (*Token, error) { // first we attempt to get the token from the // authorization header. 
if len(token) != 0 { - logrus.Tracef("token.ParseRequest: found token in header: %s", token) + log.Trace().Msgf("token.ParseRequest: found token in header: %s", token) bearer := token if _, err := fmt.Sscanf(token, "Bearer %s", &bearer); err != nil { return nil, err diff --git a/vendor/github.com/gin-gonic/contrib/LICENSE b/vendor/github.com/gin-gonic/contrib/LICENSE deleted file mode 100644 index 068ac3d21..000000000 --- a/vendor/github.com/gin-gonic/contrib/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2019 Gin-Gonic - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/gin-gonic/contrib/ginrus/README.md b/vendor/github.com/gin-gonic/contrib/ginrus/README.md deleted file mode 100644 index 02f755ebb..000000000 --- a/vendor/github.com/gin-gonic/contrib/ginrus/README.md +++ /dev/null @@ -1,5 +0,0 @@ -# cache - -## EOL-warning - -**This package has been abandoned on 2018-10-31. Please use [gin-contrib/logger](https://github.com/gin-contrib/logger) instead.** diff --git a/vendor/github.com/rs/zerolog/cmd/lint/lint.go b/vendor/github.com/rs/zerolog/cmd/lint/lint.go new file mode 100644 index 000000000..700371f26 --- /dev/null +++ b/vendor/github.com/rs/zerolog/cmd/lint/lint.go @@ -0,0 +1,175 @@ +package main + +import ( + "flag" + "fmt" + "go/ast" + "go/token" + "go/types" + "os" + "path/filepath" + "strings" + + "golang.org/x/tools/go/loader" +) + +var ( + recursivelyIgnoredPkgs arrayFlag + ignoredPkgs arrayFlag + ignoredFiles arrayFlag + allowedFinishers arrayFlag = []string{"Msg", "Msgf"} + rootPkg string +) + +// parse input flags and args +func init() { + flag.Var(&recursivelyIgnoredPkgs, "ignorePkgRecursively", "ignore the specified package and all subpackages recursively") + flag.Var(&ignoredPkgs, "ignorePkg", "ignore the specified package") + flag.Var(&ignoredFiles, "ignoreFile", "ignore the specified file by its path and/or go path (package/file.go)") + flag.Var(&allowedFinishers, "finisher", "allowed finisher for the event chain") + flag.Parse() + + // add zerolog to recursively ignored packages + recursivelyIgnoredPkgs = append(recursivelyIgnoredPkgs, "github.com/rs/zerolog") + args := flag.Args() + if len(args) != 1 { + fmt.Fprintln(os.Stderr, "you must provide exactly one package path") + os.Exit(1) + } + rootPkg = args[0] +} + +func main() { + // load the package and all its dependencies + conf := loader.Config{} + conf.Import(rootPkg) + p, err := conf.Load() + if err != nil { + fmt.Fprintf(os.Stderr, "Error: unable to load the root package. 
%s\n", err.Error()) + os.Exit(1) + } + + // get the github.com/rs/zerolog.Event type + event := getEvent(p) + if event == nil { + fmt.Fprintln(os.Stderr, "Error: github.com/rs/zerolog.Event declaration not found, maybe zerolog is not imported in the scanned package?") + os.Exit(1) + } + + // get all selections (function calls) with the github.com/rs/zerolog.Event (or pointer) receiver + selections := getSelectionsWithReceiverType(p, event) + + // print the violations (if any) + hasViolations := false + for _, s := range selections { + if hasBadFinisher(p, s) { + hasViolations = true + fmt.Printf("Error: missing or bad finisher for log chain, last call: %q at: %s:%v\n", s.fn.Name(), p.Fset.File(s.Pos()).Name(), p.Fset.Position(s.Pos()).Line) + } + } + + // if no violations detected, return normally + if !hasViolations { + fmt.Println("No violations found") + return + } + + // if violations were detected, return error code + os.Exit(1) +} + +func getEvent(p *loader.Program) types.Type { + for _, pkg := range p.AllPackages { + if strings.HasSuffix(pkg.Pkg.Path(), "github.com/rs/zerolog") { + for _, d := range pkg.Defs { + if d != nil && d.Name() == "Event" { + return d.Type() + } + } + } + } + + return nil +} + +func getSelectionsWithReceiverType(p *loader.Program, targetType types.Type) map[token.Pos]selection { + selections := map[token.Pos]selection{} + + for _, z := range p.AllPackages { + for i, t := range z.Selections { + switch o := t.Obj().(type) { + case *types.Func: + // this is not a bug, o.Type() is always *types.Signature, see docs + if vt := o.Type().(*types.Signature).Recv(); vt != nil { + typ := vt.Type() + if pointer, ok := typ.(*types.Pointer); ok { + typ = pointer.Elem() + } + + if typ == targetType { + if s, ok := selections[i.Pos()]; !ok || i.End() > s.End() { + selections[i.Pos()] = selection{i, o, z.Pkg} + } + } + } + default: + // skip + } + } + } + + return selections +} + +func hasBadFinisher(p *loader.Program, s selection) bool { + pkgPath := strings.TrimPrefix(s.pkg.Path(), rootPkg+"/vendor/") + absoluteFilePath := strings.TrimPrefix(p.Fset.File(s.Pos()).Name(), rootPkg+"/vendor/") + goFilePath := pkgPath + "/" + filepath.Base(p.Fset.Position(s.Pos()).Filename) + + for _, f := range allowedFinishers { + if f == s.fn.Name() { + return false + } + } + + for _, ignoredPkg := range recursivelyIgnoredPkgs { + if strings.HasPrefix(pkgPath, ignoredPkg) { + return false + } + } + + for _, ignoredPkg := range ignoredPkgs { + if pkgPath == ignoredPkg { + return false + } + } + + for _, ignoredFile := range ignoredFiles { + if absoluteFilePath == ignoredFile { + return false + } + + if goFilePath == ignoredFile { + return false + } + } + + return true +} + +type arrayFlag []string + +func (i *arrayFlag) String() string { + return fmt.Sprintf("%v", []string(*i)) +} + +func (i *arrayFlag) Set(value string) error { + *i = append(*i, value) + return nil +} + +type selection struct { + *ast.SelectorExpr + fn *types.Func + pkg *types.Package +} diff --git a/vendor/github.com/rs/zerolog/cmd/lint/readme.md b/vendor/github.com/rs/zerolog/cmd/lint/readme.md new file mode 100644 index 000000000..a15cba52f --- /dev/null +++ b/vendor/github.com/rs/zerolog/cmd/lint/readme.md @@ -0,0 +1,37 @@ +# Zerolog Lint + +This is a basic linter that checks for missing log event finishers. Finds errors like: `log.Error().Int64("userID": 5)` - missing the `Msg`/`Msgf` finishers. 
+ +## Problem + +When using zerolog it's easy to forget to finish the log event chain by calling a finisher - the `Msg` or `Msgf` function that will schedule the event for writing. The problem with this is that it doesn't warn/panic during compilation and it's not easily found by grep or other general tools. It's even prominently mentioned in the project's readme, that: + +> It is very important to note that when using the **zerolog** chaining API, as shown above (`log.Info().Msg("hello world"`), the chain must have either the `Msg` or `Msgf` method call. If you forget to add either of these, the log will not occur and there is no compile time error to alert you of this. + +## Solution + +A basic linter like this one here that looks for method invocations on `zerolog.Event` can examine the last call in a method call chain and check if it is a finisher, thus pointing out these errors. + +## Usage + +Just compile this and then run it. Or just run it via `go run` command via something like `go run cmd/lint/lint.go`. + +The command accepts only one argument - the package to be inspected - and 4 optional flags, all of which can occur multiple times. The standard synopsis of the command is: + +`lint [-finisher value] [-ignoreFile value] [-ignorePkg value] [-ignorePkgRecursively value] package` + +#### Flags + +- finisher + - specify which finishers to accept, defaults to `Msg` and `Msgf` +- ignoreFile + - which files to ignore, either by full path or by go path (package/file.go) +- ignorePkg + - do not inspect the specified package if found in the dependecy tree +- ignorePkgRecursively + - do not inspect the specified package or its subpackages if found in the dependency tree + +## Drawbacks + +As it is, linter can generate a false positives in a specific case. These false positives come from the fact that if you have a method that returns a `zerolog.Event` the linter will flag it because you are obviously not finishing the event. This will be solved in later release. + diff --git a/vendor/golang.org/x/sys/execabs/execabs.go b/vendor/golang.org/x/sys/execabs/execabs.go new file mode 100644 index 000000000..78192498d --- /dev/null +++ b/vendor/golang.org/x/sys/execabs/execabs.go @@ -0,0 +1,102 @@ +// Copyright 2020 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package execabs is a drop-in replacement for os/exec +// that requires PATH lookups to find absolute paths. +// That is, execabs.Command("cmd") runs the same PATH lookup +// as exec.Command("cmd"), but if the result is a path +// which is relative, the Run and Start methods will report +// an error instead of running the executable. +// +// See https://blog.golang.org/path-security for more information +// about when it may be necessary or appropriate to use this package. +package execabs + +import ( + "context" + "fmt" + "os/exec" + "path/filepath" + "reflect" + "unsafe" +) + +// ErrNotFound is the error resulting if a path search failed to find an executable file. +// It is an alias for exec.ErrNotFound. +var ErrNotFound = exec.ErrNotFound + +// Cmd represents an external command being prepared or run. +// It is an alias for exec.Cmd. +type Cmd = exec.Cmd + +// Error is returned by LookPath when it fails to classify a file as an executable. +// It is an alias for exec.Error. +type Error = exec.Error + +// An ExitError reports an unsuccessful exit by a command. +// It is an alias for exec.ExitError. 
+type ExitError = exec.ExitError + +func relError(file, path string) error { + return fmt.Errorf("%s resolves to executable in current directory (.%c%s)", file, filepath.Separator, path) +} + +// LookPath searches for an executable named file in the directories +// named by the PATH environment variable. If file contains a slash, +// it is tried directly and the PATH is not consulted. The result will be +// an absolute path. +// +// LookPath differs from exec.LookPath in its handling of PATH lookups, +// which are used for file names without slashes. If exec.LookPath's +// PATH lookup would have returned an executable from the current directory, +// LookPath instead returns an error. +func LookPath(file string) (string, error) { + path, err := exec.LookPath(file) + if err != nil { + return "", err + } + if filepath.Base(file) == file && !filepath.IsAbs(path) { + return "", relError(file, path) + } + return path, nil +} + +func fixCmd(name string, cmd *exec.Cmd) { + if filepath.Base(name) == name && !filepath.IsAbs(cmd.Path) { + // exec.Command was called with a bare binary name and + // exec.LookPath returned a path which is not absolute. + // Set cmd.lookPathErr and clear cmd.Path so that it + // cannot be run. + lookPathErr := (*error)(unsafe.Pointer(reflect.ValueOf(cmd).Elem().FieldByName("lookPathErr").Addr().Pointer())) + if *lookPathErr == nil { + *lookPathErr = relError(name, cmd.Path) + } + cmd.Path = "" + } +} + +// CommandContext is like Command but includes a context. +// +// The provided context is used to kill the process (by calling os.Process.Kill) +// if the context becomes done before the command completes on its own. +func CommandContext(ctx context.Context, name string, arg ...string) *exec.Cmd { + cmd := exec.CommandContext(ctx, name, arg...) + fixCmd(name, cmd) + return cmd + +} + +// Command returns the Cmd struct to execute the named program with the given arguments. +// See exec.Command for most details. +// +// Command differs from exec.Command in its handling of PATH lookups, +// which are used when the program name contains no slashes. +// If exec.Command would have returned an exec.Cmd configured to run an +// executable from the current directory, Command instead +// returns an exec.Cmd that will return an error from Start or Run. +func Command(name string, arg ...string) *exec.Cmd { + cmd := exec.Command(name, arg...) + fixCmd(name, cmd) + return cmd +} diff --git a/vendor/golang.org/x/tools/AUTHORS b/vendor/golang.org/x/tools/AUTHORS new file mode 100644 index 000000000..15167cd74 --- /dev/null +++ b/vendor/golang.org/x/tools/AUTHORS @@ -0,0 +1,3 @@ +# This source code refers to The Go Authors for copyright purposes. +# The master list of authors is in the main Go distribution, +# visible at http://tip.golang.org/AUTHORS. diff --git a/vendor/golang.org/x/tools/CONTRIBUTORS b/vendor/golang.org/x/tools/CONTRIBUTORS new file mode 100644 index 000000000..1c4577e96 --- /dev/null +++ b/vendor/golang.org/x/tools/CONTRIBUTORS @@ -0,0 +1,3 @@ +# This source code was written by the Go contributors. +# The master list of contributors is in the main Go distribution, +# visible at http://tip.golang.org/CONTRIBUTORS. diff --git a/vendor/golang.org/x/tools/LICENSE b/vendor/golang.org/x/tools/LICENSE new file mode 100644 index 000000000..6a66aea5e --- /dev/null +++ b/vendor/golang.org/x/tools/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2009 The Go Authors. All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/golang.org/x/tools/PATENTS b/vendor/golang.org/x/tools/PATENTS new file mode 100644 index 000000000..733099041 --- /dev/null +++ b/vendor/golang.org/x/tools/PATENTS @@ -0,0 +1,22 @@ +Additional IP Rights Grant (Patents) + +"This implementation" means the copyrightable works distributed by +Google as part of the Go project. + +Google hereby grants to You a perpetual, worldwide, non-exclusive, +no-charge, royalty-free, irrevocable (except as stated in this section) +patent license to make, have made, use, offer to sell, sell, import, +transfer and otherwise run, modify and propagate the contents of this +implementation of Go, where such license applies only to those patent +claims, both currently owned or controlled by Google and acquired in +the future, licensable by Google that are necessarily infringed by this +implementation of Go. This grant does not include claims that would be +infringed only as a consequence of further modification of this +implementation. If you or your agent or exclusive licensee institute or +order or agree to the institution of patent litigation against any +entity (including a cross-claim or counterclaim in a lawsuit) alleging +that this implementation of Go or any code incorporated within this +implementation of Go constitutes direct or contributory patent +infringement, or inducement of patent infringement, then any patent +rights granted to you under this License for this implementation of Go +shall terminate as of the date such litigation is filed. diff --git a/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go new file mode 100644 index 000000000..6b7052b89 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/enclosing.go @@ -0,0 +1,627 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +// This file defines utilities for working with source positions. 
+ +import ( + "fmt" + "go/ast" + "go/token" + "sort" +) + +// PathEnclosingInterval returns the node that encloses the source +// interval [start, end), and all its ancestors up to the AST root. +// +// The definition of "enclosing" used by this function considers +// additional whitespace abutting a node to be enclosed by it. +// In this example: +// +// z := x + y // add them +// <-A-> +// <----B-----> +// +// the ast.BinaryExpr(+) node is considered to enclose interval B +// even though its [Pos()..End()) is actually only interval A. +// This behaviour makes user interfaces more tolerant of imperfect +// input. +// +// This function treats tokens as nodes, though they are not included +// in the result. e.g. PathEnclosingInterval("+") returns the +// enclosing ast.BinaryExpr("x + y"). +// +// If start==end, the 1-char interval following start is used instead. +// +// The 'exact' result is true if the interval contains only path[0] +// and perhaps some adjacent whitespace. It is false if the interval +// overlaps multiple children of path[0], or if it contains only +// interior whitespace of path[0]. +// In this example: +// +// z := x + y // add them +// <--C--> <---E--> +// ^ +// D +// +// intervals C, D and E are inexact. C is contained by the +// z-assignment statement, because it spans three of its children (:=, +// x, +). So too is the 1-char interval D, because it contains only +// interior whitespace of the assignment. E is considered interior +// whitespace of the BlockStmt containing the assignment. +// +// Precondition: [start, end) both lie within the same file as root. +// TODO(adonovan): return (nil, false) in this case and remove precond. +// Requires FileSet; see loader.tokenFileContainsPos. +// +// Postcondition: path is never nil; it always contains at least 'root'. +// +func PathEnclosingInterval(root *ast.File, start, end token.Pos) (path []ast.Node, exact bool) { + // fmt.Printf("EnclosingInterval %d %d\n", start, end) // debugging + + // Precondition: node.[Pos..End) and adjoining whitespace contain [start, end). + var visit func(node ast.Node) bool + visit = func(node ast.Node) bool { + path = append(path, node) + + nodePos := node.Pos() + nodeEnd := node.End() + + // fmt.Printf("visit(%T, %d, %d)\n", node, nodePos, nodeEnd) // debugging + + // Intersect [start, end) with interval of node. + if start < nodePos { + start = nodePos + } + if end > nodeEnd { + end = nodeEnd + } + + // Find sole child that contains [start, end). + children := childrenOf(node) + l := len(children) + for i, child := range children { + // [childPos, childEnd) is unaugmented interval of child. + childPos := child.Pos() + childEnd := child.End() + + // [augPos, augEnd) is whitespace-augmented interval of child. + augPos := childPos + augEnd := childEnd + if i > 0 { + augPos = children[i-1].End() // start of preceding whitespace + } + if i < l-1 { + nextChildPos := children[i+1].Pos() + // Does [start, end) lie between child and next child? + if start >= augEnd && end <= nextChildPos { + return false // inexact match + } + augEnd = nextChildPos // end of following whitespace + } + + // fmt.Printf("\tchild %d: [%d..%d)\tcontains interval [%d..%d)?\n", + // i, augPos, augEnd, start, end) // debugging + + // Does augmented child strictly contain [start, end)? + if augPos <= start && end <= augEnd { + _, isToken := child.(tokenNode) + return isToken || visit(child) + } + + // Does [start, end) overlap multiple children? + // i.e. 
left-augmented child contains start + // but LR-augmented child does not contain end. + if start < childEnd && end > augEnd { + break + } + } + + // No single child contained [start, end), + // so node is the result. Is it exact? + + // (It's tempting to put this condition before the + // child loop, but it gives the wrong result in the + // case where a node (e.g. ExprStmt) and its sole + // child have equal intervals.) + if start == nodePos && end == nodeEnd { + return true // exact match + } + + return false // inexact: overlaps multiple children + } + + if start > end { + start, end = end, start + } + + if start < root.End() && end > root.Pos() { + if start == end { + end = start + 1 // empty interval => interval of size 1 + } + exact = visit(root) + + // Reverse the path: + for i, l := 0, len(path); i < l/2; i++ { + path[i], path[l-1-i] = path[l-1-i], path[i] + } + } else { + // Selection lies within whitespace preceding the + // first (or following the last) declaration in the file. + // The result nonetheless always includes the ast.File. + path = append(path, root) + } + + return +} + +// tokenNode is a dummy implementation of ast.Node for a single token. +// They are used transiently by PathEnclosingInterval but never escape +// this package. +// +type tokenNode struct { + pos token.Pos + end token.Pos +} + +func (n tokenNode) Pos() token.Pos { + return n.pos +} + +func (n tokenNode) End() token.Pos { + return n.end +} + +func tok(pos token.Pos, len int) ast.Node { + return tokenNode{pos, pos + token.Pos(len)} +} + +// childrenOf returns the direct non-nil children of ast.Node n. +// It may include fake ast.Node implementations for bare tokens. +// it is not safe to call (e.g.) ast.Walk on such nodes. +// +func childrenOf(n ast.Node) []ast.Node { + var children []ast.Node + + // First add nodes for all true subtrees. + ast.Inspect(n, func(node ast.Node) bool { + if node == n { // push n + return true // recur + } + if node != nil { // push child + children = append(children, node) + } + return false // no recursion + }) + + // Then add fake Nodes for bare tokens. 
+ switch n := n.(type) { + case *ast.ArrayType: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Elt.End(), len("]"))) + + case *ast.AssignStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.BasicLit: + children = append(children, + tok(n.ValuePos, len(n.Value))) + + case *ast.BinaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.BlockStmt: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("}"))) + + case *ast.BranchStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.CallExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + if n.Ellipsis != 0 { + children = append(children, tok(n.Ellipsis, len("..."))) + } + + case *ast.CaseClause: + if n.List == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.ChanType: + switch n.Dir { + case ast.RECV: + children = append(children, tok(n.Begin, len("<-chan"))) + case ast.SEND: + children = append(children, tok(n.Begin, len("chan<-"))) + case ast.RECV | ast.SEND: + children = append(children, tok(n.Begin, len("chan"))) + } + + case *ast.CommClause: + if n.Comm == nil { + children = append(children, + tok(n.Case, len("default"))) + } else { + children = append(children, + tok(n.Case, len("case"))) + } + children = append(children, tok(n.Colon, len(":"))) + + case *ast.Comment: + // nop + + case *ast.CommentGroup: + // nop + + case *ast.CompositeLit: + children = append(children, + tok(n.Lbrace, len("{")), + tok(n.Rbrace, len("{"))) + + case *ast.DeclStmt: + // nop + + case *ast.DeferStmt: + children = append(children, + tok(n.Defer, len("defer"))) + + case *ast.Ellipsis: + children = append(children, + tok(n.Ellipsis, len("..."))) + + case *ast.EmptyStmt: + // nop + + case *ast.ExprStmt: + // nop + + case *ast.Field: + // TODO(adonovan): Field.{Doc,Comment,Tag}? + + case *ast.FieldList: + children = append(children, + tok(n.Opening, len("(")), + tok(n.Closing, len(")"))) + + case *ast.File: + // TODO test: Doc + children = append(children, + tok(n.Package, len("package"))) + + case *ast.ForStmt: + children = append(children, + tok(n.For, len("for"))) + + case *ast.FuncDecl: + // TODO(adonovan): FuncDecl.Comment? + + // Uniquely, FuncDecl breaks the invariant that + // preorder traversal yields tokens in lexical order: + // in fact, FuncDecl.Recv precedes FuncDecl.Type.Func. + // + // As a workaround, we inline the case for FuncType + // here and order things correctly. 
+ // + children = nil // discard ast.Walk(FuncDecl) info subtrees + children = append(children, tok(n.Type.Func, len("func"))) + if n.Recv != nil { + children = append(children, n.Recv) + } + children = append(children, n.Name) + if n.Type.Params != nil { + children = append(children, n.Type.Params) + } + if n.Type.Results != nil { + children = append(children, n.Type.Results) + } + if n.Body != nil { + children = append(children, n.Body) + } + + case *ast.FuncLit: + // nop + + case *ast.FuncType: + if n.Func != 0 { + children = append(children, + tok(n.Func, len("func"))) + } + + case *ast.GenDecl: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + if n.Lparen != 0 { + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + } + + case *ast.GoStmt: + children = append(children, + tok(n.Go, len("go"))) + + case *ast.Ident: + children = append(children, + tok(n.NamePos, len(n.Name))) + + case *ast.IfStmt: + children = append(children, + tok(n.If, len("if"))) + + case *ast.ImportSpec: + // TODO(adonovan): ImportSpec.{Doc,EndPos}? + + case *ast.IncDecStmt: + children = append(children, + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.IndexExpr: + children = append(children, + tok(n.Lbrack, len("{")), + tok(n.Rbrack, len("}"))) + + case *ast.InterfaceType: + children = append(children, + tok(n.Interface, len("interface"))) + + case *ast.KeyValueExpr: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.LabeledStmt: + children = append(children, + tok(n.Colon, len(":"))) + + case *ast.MapType: + children = append(children, + tok(n.Map, len("map"))) + + case *ast.ParenExpr: + children = append(children, + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.RangeStmt: + children = append(children, + tok(n.For, len("for")), + tok(n.TokPos, len(n.Tok.String()))) + + case *ast.ReturnStmt: + children = append(children, + tok(n.Return, len("return"))) + + case *ast.SelectStmt: + children = append(children, + tok(n.Select, len("select"))) + + case *ast.SelectorExpr: + // nop + + case *ast.SendStmt: + children = append(children, + tok(n.Arrow, len("<-"))) + + case *ast.SliceExpr: + children = append(children, + tok(n.Lbrack, len("[")), + tok(n.Rbrack, len("]"))) + + case *ast.StarExpr: + children = append(children, tok(n.Star, len("*"))) + + case *ast.StructType: + children = append(children, tok(n.Struct, len("struct"))) + + case *ast.SwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.TypeAssertExpr: + children = append(children, + tok(n.Lparen-1, len(".")), + tok(n.Lparen, len("(")), + tok(n.Rparen, len(")"))) + + case *ast.TypeSpec: + // TODO(adonovan): TypeSpec.{Doc,Comment}? + + case *ast.TypeSwitchStmt: + children = append(children, tok(n.Switch, len("switch"))) + + case *ast.UnaryExpr: + children = append(children, tok(n.OpPos, len(n.Op.String()))) + + case *ast.ValueSpec: + // TODO(adonovan): ValueSpec.{Doc,Comment}? + + case *ast.BadDecl, *ast.BadExpr, *ast.BadStmt: + // nop + } + + // TODO(adonovan): opt: merge the logic of ast.Inspect() into + // the switch above so we can make interleaved callbacks for + // both Nodes and Tokens in the right order and avoid the need + // to sort. 
+ sort.Sort(byPos(children)) + + return children +} + +type byPos []ast.Node + +func (sl byPos) Len() int { + return len(sl) +} +func (sl byPos) Less(i, j int) bool { + return sl[i].Pos() < sl[j].Pos() +} +func (sl byPos) Swap(i, j int) { + sl[i], sl[j] = sl[j], sl[i] +} + +// NodeDescription returns a description of the concrete type of n suitable +// for a user interface. +// +// TODO(adonovan): in some cases (e.g. Field, FieldList, Ident, +// StarExpr) we could be much more specific given the path to the AST +// root. Perhaps we should do that. +// +func NodeDescription(n ast.Node) string { + switch n := n.(type) { + case *ast.ArrayType: + return "array type" + case *ast.AssignStmt: + return "assignment" + case *ast.BadDecl: + return "bad declaration" + case *ast.BadExpr: + return "bad expression" + case *ast.BadStmt: + return "bad statement" + case *ast.BasicLit: + return "basic literal" + case *ast.BinaryExpr: + return fmt.Sprintf("binary %s operation", n.Op) + case *ast.BlockStmt: + return "block" + case *ast.BranchStmt: + switch n.Tok { + case token.BREAK: + return "break statement" + case token.CONTINUE: + return "continue statement" + case token.GOTO: + return "goto statement" + case token.FALLTHROUGH: + return "fall-through statement" + } + case *ast.CallExpr: + if len(n.Args) == 1 && !n.Ellipsis.IsValid() { + return "function call (or conversion)" + } + return "function call" + case *ast.CaseClause: + return "case clause" + case *ast.ChanType: + return "channel type" + case *ast.CommClause: + return "communication clause" + case *ast.Comment: + return "comment" + case *ast.CommentGroup: + return "comment group" + case *ast.CompositeLit: + return "composite literal" + case *ast.DeclStmt: + return NodeDescription(n.Decl) + " statement" + case *ast.DeferStmt: + return "defer statement" + case *ast.Ellipsis: + return "ellipsis" + case *ast.EmptyStmt: + return "empty statement" + case *ast.ExprStmt: + return "expression statement" + case *ast.Field: + // Can be any of these: + // struct {x, y int} -- struct field(s) + // struct {T} -- anon struct field + // interface {I} -- interface embedding + // interface {f()} -- interface method + // func (A) func(B) C -- receiver, param(s), result(s) + return "field/method/parameter" + case *ast.FieldList: + return "field/method/parameter list" + case *ast.File: + return "source file" + case *ast.ForStmt: + return "for loop" + case *ast.FuncDecl: + return "function declaration" + case *ast.FuncLit: + return "function literal" + case *ast.FuncType: + return "function type" + case *ast.GenDecl: + switch n.Tok { + case token.IMPORT: + return "import declaration" + case token.CONST: + return "constant declaration" + case token.TYPE: + return "type declaration" + case token.VAR: + return "variable declaration" + } + case *ast.GoStmt: + return "go statement" + case *ast.Ident: + return "identifier" + case *ast.IfStmt: + return "if statement" + case *ast.ImportSpec: + return "import specification" + case *ast.IncDecStmt: + if n.Tok == token.INC { + return "increment statement" + } + return "decrement statement" + case *ast.IndexExpr: + return "index expression" + case *ast.InterfaceType: + return "interface type" + case *ast.KeyValueExpr: + return "key/value association" + case *ast.LabeledStmt: + return "statement label" + case *ast.MapType: + return "map type" + case *ast.Package: + return "package" + case *ast.ParenExpr: + return "parenthesized " + NodeDescription(n.X) + case *ast.RangeStmt: + return "range loop" + case *ast.ReturnStmt: + return 
"return statement" + case *ast.SelectStmt: + return "select statement" + case *ast.SelectorExpr: + return "selector" + case *ast.SendStmt: + return "channel send" + case *ast.SliceExpr: + return "slice expression" + case *ast.StarExpr: + return "*-operation" // load/store expr or pointer type + case *ast.StructType: + return "struct type" + case *ast.SwitchStmt: + return "switch statement" + case *ast.TypeAssertExpr: + return "type assertion" + case *ast.TypeSpec: + return "type specification" + case *ast.TypeSwitchStmt: + return "type switch" + case *ast.UnaryExpr: + return fmt.Sprintf("unary %s operation", n.Op) + case *ast.ValueSpec: + return "value specification" + + } + panic(fmt.Sprintf("unexpected node type: %T", n)) +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/imports.go b/vendor/golang.org/x/tools/go/ast/astutil/imports.go new file mode 100644 index 000000000..2087ceec9 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/imports.go @@ -0,0 +1,482 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package astutil contains common utilities for working with the Go AST. +package astutil // import "golang.org/x/tools/go/ast/astutil" + +import ( + "fmt" + "go/ast" + "go/token" + "strconv" + "strings" +) + +// AddImport adds the import path to the file f, if absent. +func AddImport(fset *token.FileSet, f *ast.File, path string) (added bool) { + return AddNamedImport(fset, f, "", path) +} + +// AddNamedImport adds the import with the given name and path to the file f, if absent. +// If name is not empty, it is used to rename the import. +// +// For example, calling +// AddNamedImport(fset, f, "pathpkg", "path") +// adds +// import pathpkg "path" +func AddNamedImport(fset *token.FileSet, f *ast.File, name, path string) (added bool) { + if imports(f, name, path) { + return false + } + + newImport := &ast.ImportSpec{ + Path: &ast.BasicLit{ + Kind: token.STRING, + Value: strconv.Quote(path), + }, + } + if name != "" { + newImport.Name = &ast.Ident{Name: name} + } + + // Find an import decl to add to. + // The goal is to find an existing import + // whose import path has the longest shared + // prefix with path. + var ( + bestMatch = -1 // length of longest shared prefix + lastImport = -1 // index in f.Decls of the file's final import decl + impDecl *ast.GenDecl // import decl containing the best match + impIndex = -1 // spec index in impDecl containing the best match + + isThirdPartyPath = isThirdParty(path) + ) + for i, decl := range f.Decls { + gen, ok := decl.(*ast.GenDecl) + if ok && gen.Tok == token.IMPORT { + lastImport = i + // Do not add to import "C", to avoid disrupting the + // association with its doc comment, breaking cgo. + if declImports(gen, "C") { + continue + } + + // Match an empty import decl if that's all that is available. + if len(gen.Specs) == 0 && bestMatch == -1 { + impDecl = gen + } + + // Compute longest shared prefix with imports in this group and find best + // matched import spec. + // 1. Always prefer import spec with longest shared prefix. + // 2. While match length is 0, + // - for stdlib package: prefer first import spec. + // - for third party package: prefer first third party import spec. + // We cannot use last import spec as best match for third party package + // because grouped imports are usually placed last by goimports -local + // flag. + // See issue #19190. 
+ seenAnyThirdParty := false + for j, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + p := importPath(impspec) + n := matchLen(p, path) + if n > bestMatch || (bestMatch == 0 && !seenAnyThirdParty && isThirdPartyPath) { + bestMatch = n + impDecl = gen + impIndex = j + } + seenAnyThirdParty = seenAnyThirdParty || isThirdParty(p) + } + } + } + + // If no import decl found, add one after the last import. + if impDecl == nil { + impDecl = &ast.GenDecl{ + Tok: token.IMPORT, + } + if lastImport >= 0 { + impDecl.TokPos = f.Decls[lastImport].End() + } else { + // There are no existing imports. + // Our new import, preceded by a blank line, goes after the package declaration + // and after the comment, if any, that starts on the same line as the + // package declaration. + impDecl.TokPos = f.Package + + file := fset.File(f.Package) + pkgLine := file.Line(f.Package) + for _, c := range f.Comments { + if file.Line(c.Pos()) > pkgLine { + break + } + // +2 for a blank line + impDecl.TokPos = c.End() + 2 + } + } + f.Decls = append(f.Decls, nil) + copy(f.Decls[lastImport+2:], f.Decls[lastImport+1:]) + f.Decls[lastImport+1] = impDecl + } + + // Insert new import at insertAt. + insertAt := 0 + if impIndex >= 0 { + // insert after the found import + insertAt = impIndex + 1 + } + impDecl.Specs = append(impDecl.Specs, nil) + copy(impDecl.Specs[insertAt+1:], impDecl.Specs[insertAt:]) + impDecl.Specs[insertAt] = newImport + pos := impDecl.Pos() + if insertAt > 0 { + // If there is a comment after an existing import, preserve the comment + // position by adding the new import after the comment. + if spec, ok := impDecl.Specs[insertAt-1].(*ast.ImportSpec); ok && spec.Comment != nil { + pos = spec.Comment.End() + } else { + // Assign same position as the previous import, + // so that the sorter sees it as being in the same block. + pos = impDecl.Specs[insertAt-1].Pos() + } + } + if newImport.Name != nil { + newImport.Name.NamePos = pos + } + newImport.Path.ValuePos = pos + newImport.EndPos = pos + + // Clean up parens. impDecl contains at least one spec. + if len(impDecl.Specs) == 1 { + // Remove unneeded parens. + impDecl.Lparen = token.NoPos + } else if !impDecl.Lparen.IsValid() { + // impDecl needs parens added. + impDecl.Lparen = impDecl.Specs[0].Pos() + } + + f.Imports = append(f.Imports, newImport) + + if len(f.Decls) <= 1 { + return true + } + + // Merge all the import declarations into the first one. + var first *ast.GenDecl + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT || declImports(gen, "C") { + continue + } + if first == nil { + first = gen + continue // Don't touch the first one. + } + // We now know there is more than one package in this import + // declaration. Ensure that it ends up parenthesized. + first.Lparen = first.Pos() + // Move the imports of the other import declaration to the first one. + for _, spec := range gen.Specs { + spec.(*ast.ImportSpec).Path.ValuePos = first.Pos() + first.Specs = append(first.Specs, spec) + } + f.Decls = append(f.Decls[:i], f.Decls[i+1:]...) + i-- + } + + return true +} + +func isThirdParty(importPath string) bool { + // Third party package import path usually contains "." (".com", ".org", ...) + // This logic is taken from golang.org/x/tools/imports package. + return strings.Contains(importPath, ".") +} + +// DeleteImport deletes the import path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. 
+func DeleteImport(fset *token.FileSet, f *ast.File, path string) (deleted bool) { + return DeleteNamedImport(fset, f, "", path) +} + +// DeleteNamedImport deletes the import with the given name and path from the file f, if present. +// If there are duplicate import declarations, all matching ones are deleted. +func DeleteNamedImport(fset *token.FileSet, f *ast.File, name, path string) (deleted bool) { + var delspecs []*ast.ImportSpec + var delcomments []*ast.CommentGroup + + // Find the import nodes that import path, if any. + for i := 0; i < len(f.Decls); i++ { + decl := f.Decls[i] + gen, ok := decl.(*ast.GenDecl) + if !ok || gen.Tok != token.IMPORT { + continue + } + for j := 0; j < len(gen.Specs); j++ { + spec := gen.Specs[j] + impspec := spec.(*ast.ImportSpec) + if importName(impspec) != name || importPath(impspec) != path { + continue + } + + // We found an import spec that imports path. + // Delete it. + delspecs = append(delspecs, impspec) + deleted = true + copy(gen.Specs[j:], gen.Specs[j+1:]) + gen.Specs = gen.Specs[:len(gen.Specs)-1] + + // If this was the last import spec in this decl, + // delete the decl, too. + if len(gen.Specs) == 0 { + copy(f.Decls[i:], f.Decls[i+1:]) + f.Decls = f.Decls[:len(f.Decls)-1] + i-- + break + } else if len(gen.Specs) == 1 { + if impspec.Doc != nil { + delcomments = append(delcomments, impspec.Doc) + } + if impspec.Comment != nil { + delcomments = append(delcomments, impspec.Comment) + } + for _, cg := range f.Comments { + // Found comment on the same line as the import spec. + if cg.End() < impspec.Pos() && fset.Position(cg.End()).Line == fset.Position(impspec.Pos()).Line { + delcomments = append(delcomments, cg) + break + } + } + + spec := gen.Specs[0].(*ast.ImportSpec) + + // Move the documentation right after the import decl. + if spec.Doc != nil { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Doc.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + } + for _, cg := range f.Comments { + if cg.End() < spec.Pos() && fset.Position(cg.End()).Line == fset.Position(spec.Pos()).Line { + for fset.Position(gen.TokPos).Line+1 < fset.Position(spec.Pos()).Line { + fset.File(gen.TokPos).MergeLine(fset.Position(gen.TokPos).Line) + } + break + } + } + } + if j > 0 { + lastImpspec := gen.Specs[j-1].(*ast.ImportSpec) + lastLine := fset.Position(lastImpspec.Path.ValuePos).Line + line := fset.Position(impspec.Path.ValuePos).Line + + // We deleted an entry but now there may be + // a blank line-sized hole where the import was. + if line-lastLine > 1 || !gen.Rparen.IsValid() { + // There was a blank line immediately preceding the deleted import, + // so there's no need to close the hole. The right parenthesis is + // invalid after AddImport to an import statement without parenthesis. + // Do nothing. + } else if line != fset.File(gen.Rparen).LineCount() { + // There was no blank line. Close the hole. + fset.File(gen.Rparen).MergeLine(line) + } + } + j-- + } + } + + // Delete imports from f.Imports. + for i := 0; i < len(f.Imports); i++ { + imp := f.Imports[i] + for j, del := range delspecs { + if imp == del { + copy(f.Imports[i:], f.Imports[i+1:]) + f.Imports = f.Imports[:len(f.Imports)-1] + copy(delspecs[j:], delspecs[j+1:]) + delspecs = delspecs[:len(delspecs)-1] + i-- + break + } + } + } + + // Delete comments from f.Comments. 
+ for i := 0; i < len(f.Comments); i++ { + cg := f.Comments[i] + for j, del := range delcomments { + if cg == del { + copy(f.Comments[i:], f.Comments[i+1:]) + f.Comments = f.Comments[:len(f.Comments)-1] + copy(delcomments[j:], delcomments[j+1:]) + delcomments = delcomments[:len(delcomments)-1] + i-- + break + } + } + } + + if len(delspecs) > 0 { + panic(fmt.Sprintf("deleted specs from Decls but not Imports: %v", delspecs)) + } + + return +} + +// RewriteImport rewrites any import of path oldPath to path newPath. +func RewriteImport(fset *token.FileSet, f *ast.File, oldPath, newPath string) (rewrote bool) { + for _, imp := range f.Imports { + if importPath(imp) == oldPath { + rewrote = true + // record old End, because the default is to compute + // it using the length of imp.Path.Value. + imp.EndPos = imp.End() + imp.Path.Value = strconv.Quote(newPath) + } + } + return +} + +// UsesImport reports whether a given import is used. +func UsesImport(f *ast.File, path string) (used bool) { + spec := importSpec(f, path) + if spec == nil { + return + } + + name := spec.Name.String() + switch name { + case "": + // If the package name is not explicitly specified, + // make an educated guess. This is not guaranteed to be correct. + lastSlash := strings.LastIndex(path, "/") + if lastSlash == -1 { + name = path + } else { + name = path[lastSlash+1:] + } + case "_", ".": + // Not sure if this import is used - err on the side of caution. + return true + } + + ast.Walk(visitFn(func(n ast.Node) { + sel, ok := n.(*ast.SelectorExpr) + if ok && isTopName(sel.X, name) { + used = true + } + }), f) + + return +} + +type visitFn func(node ast.Node) + +func (fn visitFn) Visit(node ast.Node) ast.Visitor { + fn(node) + return fn +} + +// imports reports whether f has an import with the specified name and path. +func imports(f *ast.File, name, path string) bool { + for _, s := range f.Imports { + if importName(s) == name && importPath(s) == path { + return true + } + } + return false +} + +// importSpec returns the import spec if f imports path, +// or nil otherwise. +func importSpec(f *ast.File, path string) *ast.ImportSpec { + for _, s := range f.Imports { + if importPath(s) == path { + return s + } + } + return nil +} + +// importName returns the name of s, +// or "" if the import is not named. +func importName(s *ast.ImportSpec) string { + if s.Name == nil { + return "" + } + return s.Name.Name +} + +// importPath returns the unquoted import path of s, +// or "" if the path is not properly quoted. +func importPath(s *ast.ImportSpec) string { + t, err := strconv.Unquote(s.Path.Value) + if err != nil { + return "" + } + return t +} + +// declImports reports whether gen contains an import of path. +func declImports(gen *ast.GenDecl, path string) bool { + if gen.Tok != token.IMPORT { + return false + } + for _, spec := range gen.Specs { + impspec := spec.(*ast.ImportSpec) + if importPath(impspec) == path { + return true + } + } + return false +} + +// matchLen returns the length of the longest path segment prefix shared by x and y. +func matchLen(x, y string) int { + n := 0 + for i := 0; i < len(x) && i < len(y) && x[i] == y[i]; i++ { + if x[i] == '/' { + n++ + } + } + return n +} + +// isTopName returns true if n is a top-level unresolved identifier with the given name. +func isTopName(n ast.Expr, name string) bool { + id, ok := n.(*ast.Ident) + return ok && id.Name == name && id.Obj == nil +} + +// Imports returns the file imports grouped by paragraph. 
+func Imports(fset *token.FileSet, f *ast.File) [][]*ast.ImportSpec { + var groups [][]*ast.ImportSpec + + for _, decl := range f.Decls { + genDecl, ok := decl.(*ast.GenDecl) + if !ok || genDecl.Tok != token.IMPORT { + break + } + + group := []*ast.ImportSpec{} + + var lastLine int + for _, spec := range genDecl.Specs { + importSpec := spec.(*ast.ImportSpec) + pos := importSpec.Path.ValuePos + line := fset.Position(pos).Line + if lastLine > 0 && pos > 0 && line-lastLine > 1 { + groups = append(groups, group) + group = []*ast.ImportSpec{} + } + group = append(group, importSpec) + lastLine = line + } + groups = append(groups, group) + } + + return groups +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go new file mode 100644 index 000000000..b949fc840 --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/rewrite.go @@ -0,0 +1,483 @@ +// Copyright 2017 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import ( + "fmt" + "go/ast" + "reflect" + "sort" + + "golang.org/x/tools/internal/typeparams" +) + +// An ApplyFunc is invoked by Apply for each node n, even if n is nil, +// before and/or after the node's children, using a Cursor describing +// the current node and providing operations on it. +// +// The return value of ApplyFunc controls the syntax tree traversal. +// See Apply for details. +type ApplyFunc func(*Cursor) bool + +// Apply traverses a syntax tree recursively, starting with root, +// and calling pre and post for each node as described below. +// Apply returns the syntax tree, possibly modified. +// +// If pre is not nil, it is called for each node before the node's +// children are traversed (pre-order). If pre returns false, no +// children are traversed, and post is not called for that node. +// +// If post is not nil, and a prior call of pre didn't return false, +// post is called for each node after its children are traversed +// (post-order). If post returns false, traversal is terminated and +// Apply returns immediately. +// +// Only fields that refer to AST nodes are considered children; +// i.e., token.Pos, Scopes, Objects, and fields of basic types +// (strings, etc.) are ignored. +// +// Children are traversed in the order in which they appear in the +// respective node's struct definition. A package's files are +// traversed in the filenames' alphabetical order. +// +func Apply(root ast.Node, pre, post ApplyFunc) (result ast.Node) { + parent := &struct{ ast.Node }{root} + defer func() { + if r := recover(); r != nil && r != abort { + panic(r) + } + result = parent.Node + }() + a := &application{pre: pre, post: post} + a.apply(parent, "Node", nil, root) + return +} + +var abort = new(int) // singleton, to signal termination of Apply + +// A Cursor describes a node encountered during Apply. +// Information about the node and its parent is available +// from the Node, Parent, Name, and Index methods. +// +// If p is a variable of type and value of the current parent node +// c.Parent(), and f is the field identifier with name c.Name(), +// the following invariants hold: +// +// p.f == c.Node() if c.Index() < 0 +// p.f[c.Index()] == c.Node() if c.Index() >= 0 +// +// The methods Replace, Delete, InsertBefore, and InsertAfter +// can be used to change the AST without disrupting Apply. 
+type Cursor struct { + parent ast.Node + name string + iter *iterator // valid if non-nil + node ast.Node +} + +// Node returns the current Node. +func (c *Cursor) Node() ast.Node { return c.node } + +// Parent returns the parent of the current Node. +func (c *Cursor) Parent() ast.Node { return c.parent } + +// Name returns the name of the parent Node field that contains the current Node. +// If the parent is a *ast.Package and the current Node is a *ast.File, Name returns +// the filename for the current Node. +func (c *Cursor) Name() string { return c.name } + +// Index reports the index >= 0 of the current Node in the slice of Nodes that +// contains it, or a value < 0 if the current Node is not part of a slice. +// The index of the current node changes if InsertBefore is called while +// processing the current node. +func (c *Cursor) Index() int { + if c.iter != nil { + return c.iter.index + } + return -1 +} + +// field returns the current node's parent field value. +func (c *Cursor) field() reflect.Value { + return reflect.Indirect(reflect.ValueOf(c.parent)).FieldByName(c.name) +} + +// Replace replaces the current Node with n. +// The replacement node is not walked by Apply. +func (c *Cursor) Replace(n ast.Node) { + if _, ok := c.node.(*ast.File); ok { + file, ok := n.(*ast.File) + if !ok { + panic("attempt to replace *ast.File with non-*ast.File") + } + c.parent.(*ast.Package).Files[c.name] = file + return + } + + v := c.field() + if i := c.Index(); i >= 0 { + v = v.Index(i) + } + v.Set(reflect.ValueOf(n)) +} + +// Delete deletes the current Node from its containing slice. +// If the current Node is not part of a slice, Delete panics. +// As a special case, if the current node is a package file, +// Delete removes it from the package's Files map. +func (c *Cursor) Delete() { + if _, ok := c.node.(*ast.File); ok { + delete(c.parent.(*ast.Package).Files, c.name) + return + } + + i := c.Index() + if i < 0 { + panic("Delete node not contained in slice") + } + v := c.field() + l := v.Len() + reflect.Copy(v.Slice(i, l), v.Slice(i+1, l)) + v.Index(l - 1).Set(reflect.Zero(v.Type().Elem())) + v.SetLen(l - 1) + c.iter.step-- +} + +// InsertAfter inserts n after the current Node in its containing slice. +// If the current Node is not part of a slice, InsertAfter panics. +// Apply does not walk n. +func (c *Cursor) InsertAfter(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertAfter node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+2, l), v.Slice(i+1, l)) + v.Index(i + 1).Set(reflect.ValueOf(n)) + c.iter.step++ +} + +// InsertBefore inserts n before the current Node in its containing slice. +// If the current Node is not part of a slice, InsertBefore panics. +// Apply will not walk n. +func (c *Cursor) InsertBefore(n ast.Node) { + i := c.Index() + if i < 0 { + panic("InsertBefore node not contained in slice") + } + v := c.field() + v.Set(reflect.Append(v, reflect.Zero(v.Type().Elem()))) + l := v.Len() + reflect.Copy(v.Slice(i+1, l), v.Slice(i, l)) + v.Index(i).Set(reflect.ValueOf(n)) + c.iter.index++ +} + +// application carries all the shared data so we can pass it around cheaply. 
+type application struct { + pre, post ApplyFunc + cursor Cursor + iter iterator +} + +func (a *application) apply(parent ast.Node, name string, iter *iterator, n ast.Node) { + // convert typed nil into untyped nil + if v := reflect.ValueOf(n); v.Kind() == reflect.Ptr && v.IsNil() { + n = nil + } + + // avoid heap-allocating a new cursor for each apply call; reuse a.cursor instead + saved := a.cursor + a.cursor.parent = parent + a.cursor.name = name + a.cursor.iter = iter + a.cursor.node = n + + if a.pre != nil && !a.pre(&a.cursor) { + a.cursor = saved + return + } + + // walk children + // (the order of the cases matches the order of the corresponding node types in go/ast) + switch n := n.(type) { + case nil: + // nothing to do + + // Comments and fields + case *ast.Comment: + // nothing to do + + case *ast.CommentGroup: + if n != nil { + a.applyList(n, "List") + } + + case *ast.Field: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.FieldList: + a.applyList(n, "List") + + // Expressions + case *ast.BadExpr, *ast.Ident, *ast.BasicLit: + // nothing to do + + case *ast.Ellipsis: + a.apply(n, "Elt", nil, n.Elt) + + case *ast.FuncLit: + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + case *ast.CompositeLit: + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Elts") + + case *ast.ParenExpr: + a.apply(n, "X", nil, n.X) + + case *ast.SelectorExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Sel", nil, n.Sel) + + case *ast.IndexExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Index", nil, n.Index) + + case *ast.SliceExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Low", nil, n.Low) + a.apply(n, "High", nil, n.High) + a.apply(n, "Max", nil, n.Max) + + case *ast.TypeAssertExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Type", nil, n.Type) + + case *ast.CallExpr: + a.apply(n, "Fun", nil, n.Fun) + a.applyList(n, "Args") + + case *ast.StarExpr: + a.apply(n, "X", nil, n.X) + + case *ast.UnaryExpr: + a.apply(n, "X", nil, n.X) + + case *ast.BinaryExpr: + a.apply(n, "X", nil, n.X) + a.apply(n, "Y", nil, n.Y) + + case *ast.KeyValueExpr: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + // Types + case *ast.ArrayType: + a.apply(n, "Len", nil, n.Len) + a.apply(n, "Elt", nil, n.Elt) + + case *ast.StructType: + a.apply(n, "Fields", nil, n.Fields) + + case *ast.FuncType: + a.apply(n, "Params", nil, n.Params) + a.apply(n, "Results", nil, n.Results) + + case *ast.InterfaceType: + a.apply(n, "Methods", nil, n.Methods) + + case *ast.MapType: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + + case *ast.ChanType: + a.apply(n, "Value", nil, n.Value) + + // Statements + case *ast.BadStmt: + // nothing to do + + case *ast.DeclStmt: + a.apply(n, "Decl", nil, n.Decl) + + case *ast.EmptyStmt: + // nothing to do + + case *ast.LabeledStmt: + a.apply(n, "Label", nil, n.Label) + a.apply(n, "Stmt", nil, n.Stmt) + + case *ast.ExprStmt: + a.apply(n, "X", nil, n.X) + + case *ast.SendStmt: + a.apply(n, "Chan", nil, n.Chan) + a.apply(n, "Value", nil, n.Value) + + case *ast.IncDecStmt: + a.apply(n, "X", nil, n.X) + + case *ast.AssignStmt: + a.applyList(n, "Lhs") + a.applyList(n, "Rhs") + + case *ast.GoStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.DeferStmt: + a.apply(n, "Call", nil, n.Call) + + case *ast.ReturnStmt: + a.applyList(n, "Results") + + case *ast.BranchStmt: + a.apply(n, "Label", nil, n.Label) + + case *ast.BlockStmt: + 
a.applyList(n, "List") + + case *ast.IfStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Body", nil, n.Body) + a.apply(n, "Else", nil, n.Else) + + case *ast.CaseClause: + a.applyList(n, "List") + a.applyList(n, "Body") + + case *ast.SwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Tag", nil, n.Tag) + a.apply(n, "Body", nil, n.Body) + + case *ast.TypeSwitchStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Assign", nil, n.Assign) + a.apply(n, "Body", nil, n.Body) + + case *ast.CommClause: + a.apply(n, "Comm", nil, n.Comm) + a.applyList(n, "Body") + + case *ast.SelectStmt: + a.apply(n, "Body", nil, n.Body) + + case *ast.ForStmt: + a.apply(n, "Init", nil, n.Init) + a.apply(n, "Cond", nil, n.Cond) + a.apply(n, "Post", nil, n.Post) + a.apply(n, "Body", nil, n.Body) + + case *ast.RangeStmt: + a.apply(n, "Key", nil, n.Key) + a.apply(n, "Value", nil, n.Value) + a.apply(n, "X", nil, n.X) + a.apply(n, "Body", nil, n.Body) + + // Declarations + case *ast.ImportSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Path", nil, n.Path) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.ValueSpec: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Names") + a.apply(n, "Type", nil, n.Type) + a.applyList(n, "Values") + a.apply(n, "Comment", nil, n.Comment) + + case *ast.TypeSpec: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Comment", nil, n.Comment) + + case *ast.BadDecl: + // nothing to do + + case *ast.GenDecl: + a.apply(n, "Doc", nil, n.Doc) + a.applyList(n, "Specs") + + case *ast.FuncDecl: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Recv", nil, n.Recv) + a.apply(n, "Name", nil, n.Name) + a.apply(n, "Type", nil, n.Type) + a.apply(n, "Body", nil, n.Body) + + // Files and packages + case *ast.File: + a.apply(n, "Doc", nil, n.Doc) + a.apply(n, "Name", nil, n.Name) + a.applyList(n, "Decls") + // Don't walk n.Comments; they have either been walked already if + // they are Doc comments, or they can be easily walked explicitly. + + case *ast.Package: + // collect and sort names for reproducible behavior + var names []string + for name := range n.Files { + names = append(names, name) + } + sort.Strings(names) + for _, name := range names { + a.apply(n, name, nil, n.Files[name]) + } + + default: + if typeparams.IsListExpr(n) { + a.applyList(n, "ElemList") + } else { + panic(fmt.Sprintf("Apply: unexpected node type %T", n)) + } + } + + if a.post != nil && !a.post(&a.cursor) { + panic(abort) + } + + a.cursor = saved +} + +// An iterator controls iteration over a slice of nodes. 
+type iterator struct { + index, step int +} + +func (a *application) applyList(parent ast.Node, name string) { + // avoid heap-allocating a new iterator for each applyList call; reuse a.iter instead + saved := a.iter + a.iter.index = 0 + for { + // must reload parent.name each time, since cursor modifications might change it + v := reflect.Indirect(reflect.ValueOf(parent)).FieldByName(name) + if a.iter.index >= v.Len() { + break + } + + // element x may be nil in a bad AST - be cautious + var x ast.Node + if e := v.Index(a.iter.index); e.IsValid() { + x = e.Interface().(ast.Node) + } + + a.iter.step = 1 + a.apply(parent, name, &a.iter, x) + a.iter.index += a.iter.step + } + a.iter = saved +} diff --git a/vendor/golang.org/x/tools/go/ast/astutil/util.go b/vendor/golang.org/x/tools/go/ast/astutil/util.go new file mode 100644 index 000000000..919d5305a --- /dev/null +++ b/vendor/golang.org/x/tools/go/ast/astutil/util.go @@ -0,0 +1,18 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package astutil + +import "go/ast" + +// Unparen returns e with any enclosing parentheses stripped. +func Unparen(e ast.Expr) ast.Expr { + for { + p, ok := e.(*ast.ParenExpr) + if !ok { + return e + } + e = p.X + } +} diff --git a/vendor/golang.org/x/tools/go/buildutil/allpackages.go b/vendor/golang.org/x/tools/go/buildutil/allpackages.go new file mode 100644 index 000000000..c0cb03e7b --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/allpackages.go @@ -0,0 +1,198 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package buildutil provides utilities related to the go/build +// package in the standard library. +// +// All I/O is done via the build.Context file system interface, which must +// be concurrency-safe. +package buildutil // import "golang.org/x/tools/go/buildutil" + +import ( + "go/build" + "os" + "path/filepath" + "sort" + "strings" + "sync" +) + +// AllPackages returns the package path of each Go package in any source +// directory of the specified build context (e.g. $GOROOT or an element +// of $GOPATH). Errors are ignored. The results are sorted. +// All package paths are canonical, and thus may contain "/vendor/". +// +// The result may include import paths for directories that contain no +// *.go files, such as "archive" (in $GOROOT/src). +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. +// +func AllPackages(ctxt *build.Context) []string { + var list []string + ForEachPackage(ctxt, func(pkg string, _ error) { + list = append(list, pkg) + }) + sort.Strings(list) + return list +} + +// ForEachPackage calls the found function with the package path of +// each Go package it finds in any source directory of the specified +// build context (e.g. $GOROOT or an element of $GOPATH). +// All package paths are canonical, and thus may contain "/vendor/". +// +// If the package directory exists but could not be read, the second +// argument to the found function provides the error. +// +// All I/O is done via the build.Context file system interface, +// which must be concurrency-safe. 
+// +func ForEachPackage(ctxt *build.Context, found func(importPath string, err error)) { + ch := make(chan item) + + var wg sync.WaitGroup + for _, root := range ctxt.SrcDirs() { + root := root + wg.Add(1) + go func() { + allPackages(ctxt, root, ch) + wg.Done() + }() + } + go func() { + wg.Wait() + close(ch) + }() + + // All calls to found occur in the caller's goroutine. + for i := range ch { + found(i.importPath, i.err) + } +} + +type item struct { + importPath string + err error // (optional) +} + +// We use a process-wide counting semaphore to limit +// the number of parallel calls to ReadDir. +var ioLimit = make(chan bool, 20) + +func allPackages(ctxt *build.Context, root string, ch chan<- item) { + root = filepath.Clean(root) + string(os.PathSeparator) + + var wg sync.WaitGroup + + var walkDir func(dir string) + walkDir = func(dir string) { + // Avoid .foo, _foo, and testdata directory trees. + base := filepath.Base(dir) + if base == "" || base[0] == '.' || base[0] == '_' || base == "testdata" { + return + } + + pkg := filepath.ToSlash(strings.TrimPrefix(dir, root)) + + // Prune search if we encounter any of these import paths. + switch pkg { + case "builtin": + return + } + + ioLimit <- true + files, err := ReadDir(ctxt, dir) + <-ioLimit + if pkg != "" || err != nil { + ch <- item{pkg, err} + } + for _, fi := range files { + fi := fi + if fi.IsDir() { + wg.Add(1) + go func() { + walkDir(filepath.Join(dir, fi.Name())) + wg.Done() + }() + } + } + } + + walkDir(root) + wg.Wait() +} + +// ExpandPatterns returns the set of packages matched by patterns, +// which may have the following forms: +// +// golang.org/x/tools/cmd/guru # a single package +// golang.org/x/tools/... # all packages beneath dir +// ... # the entire workspace. +// +// Order is significant: a pattern preceded by '-' removes matching +// packages from the set. For example, these patterns match all encoding +// packages except encoding/xml: +// +// encoding/... -encoding/xml +// +// A trailing slash in a pattern is ignored. (Path components of Go +// package names are separated by slash, not the platform's path separator.) +// +func ExpandPatterns(ctxt *build.Context, patterns []string) map[string]bool { + // TODO(adonovan): support other features of 'go list': + // - "std"/"cmd"/"all" meta-packages + // - "..." not at the end of a pattern + // - relative patterns using "./" or "../" prefix + + pkgs := make(map[string]bool) + doPkg := func(pkg string, neg bool) { + if neg { + delete(pkgs, pkg) + } else { + pkgs[pkg] = true + } + } + + // Scan entire workspace if wildcards are present. + // TODO(adonovan): opt: scan only the necessary subtrees of the workspace. + var all []string + for _, arg := range patterns { + if strings.HasSuffix(arg, "...") { + all = AllPackages(ctxt) + break + } + } + + for _, arg := range patterns { + if arg == "" { + continue + } + + neg := arg[0] == '-' + if neg { + arg = arg[1:] + } + + if arg == "..." { + // ... matches all packages + for _, pkg := range all { + doPkg(pkg, neg) + } + } else if dir := strings.TrimSuffix(arg, "/..."); dir != arg { + // dir/... 
matches all packages beneath dir + for _, pkg := range all { + if strings.HasPrefix(pkg, dir) && + (len(pkg) == len(dir) || pkg[len(dir)] == '/') { + doPkg(pkg, neg) + } + } + } else { + // single package + doPkg(strings.TrimSuffix(arg, "/"), neg) + } + } + + return pkgs +} diff --git a/vendor/golang.org/x/tools/go/buildutil/fakecontext.go b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go new file mode 100644 index 000000000..5fc672fd5 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/fakecontext.go @@ -0,0 +1,113 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "fmt" + "go/build" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "sort" + "strings" + "time" +) + +// FakeContext returns a build.Context for the fake file tree specified +// by pkgs, which maps package import paths to a mapping from file base +// names to contents. +// +// The fake Context has a GOROOT of "/go" and no GOPATH, and overrides +// the necessary file access methods to read from memory instead of the +// real file system. +// +// Unlike a real file tree, the fake one has only two levels---packages +// and files---so ReadDir("/go/src/") returns all packages under +// /go/src/ including, for instance, "math" and "math/big". +// ReadDir("/go/src/math/big") would return all the files in the +// "math/big" package. +// +func FakeContext(pkgs map[string]map[string]string) *build.Context { + clean := func(filename string) string { + f := path.Clean(filepath.ToSlash(filename)) + // Removing "/go/src" while respecting segment + // boundaries has this unfortunate corner case: + if f == "/go/src" { + return "" + } + return strings.TrimPrefix(f, "/go/src/") + } + + ctxt := build.Default // copy + ctxt.GOROOT = "/go" + ctxt.GOPATH = "" + ctxt.Compiler = "gc" + ctxt.IsDir = func(dir string) bool { + dir = clean(dir) + if dir == "" { + return true // needed by (*build.Context).SrcDirs + } + return pkgs[dir] != nil + } + ctxt.ReadDir = func(dir string) ([]os.FileInfo, error) { + dir = clean(dir) + var fis []os.FileInfo + if dir == "" { + // enumerate packages + for importPath := range pkgs { + fis = append(fis, fakeDirInfo(importPath)) + } + } else { + // enumerate files of package + for basename := range pkgs[dir] { + fis = append(fis, fakeFileInfo(basename)) + } + } + sort.Sort(byName(fis)) + return fis, nil + } + ctxt.OpenFile = func(filename string) (io.ReadCloser, error) { + filename = clean(filename) + dir, base := path.Split(filename) + content, ok := pkgs[path.Clean(dir)][base] + if !ok { + return nil, fmt.Errorf("file not found: %s", filename) + } + return ioutil.NopCloser(strings.NewReader(content)), nil + } + ctxt.IsAbsPath = func(path string) bool { + path = filepath.ToSlash(path) + // Don't rely on the default (filepath.Path) since on + // Windows, it reports virtual paths as non-absolute. 
+ return strings.HasPrefix(path, "/") + } + return &ctxt +} + +type byName []os.FileInfo + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } + +type fakeFileInfo string + +func (fi fakeFileInfo) Name() string { return string(fi) } +func (fakeFileInfo) Sys() interface{} { return nil } +func (fakeFileInfo) ModTime() time.Time { return time.Time{} } +func (fakeFileInfo) IsDir() bool { return false } +func (fakeFileInfo) Size() int64 { return 0 } +func (fakeFileInfo) Mode() os.FileMode { return 0644 } + +type fakeDirInfo string + +func (fd fakeDirInfo) Name() string { return string(fd) } +func (fakeDirInfo) Sys() interface{} { return nil } +func (fakeDirInfo) ModTime() time.Time { return time.Time{} } +func (fakeDirInfo) IsDir() bool { return true } +func (fakeDirInfo) Size() int64 { return 0 } +func (fakeDirInfo) Mode() os.FileMode { return 0755 } diff --git a/vendor/golang.org/x/tools/go/buildutil/overlay.go b/vendor/golang.org/x/tools/go/buildutil/overlay.go new file mode 100644 index 000000000..8e239086b --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/overlay.go @@ -0,0 +1,103 @@ +// Copyright 2016 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +import ( + "bufio" + "bytes" + "fmt" + "go/build" + "io" + "io/ioutil" + "path/filepath" + "strconv" + "strings" +) + +// OverlayContext overlays a build.Context with additional files from +// a map. Files in the map take precedence over other files. +// +// In addition to plain string comparison, two file names are +// considered equal if their base names match and their directory +// components point at the same directory on the file system. That is, +// symbolic links are followed for directories, but not files. +// +// A common use case for OverlayContext is to allow editors to pass in +// a set of unsaved, modified files. +// +// Currently, only the Context.OpenFile function will respect the +// overlay. This may change in the future. +func OverlayContext(orig *build.Context, overlay map[string][]byte) *build.Context { + // TODO(dominikh): Implement IsDir, HasSubdir and ReadDir + + rc := func(data []byte) (io.ReadCloser, error) { + return ioutil.NopCloser(bytes.NewBuffer(data)), nil + } + + copy := *orig // make a copy + ctxt := &copy + ctxt.OpenFile = func(path string) (io.ReadCloser, error) { + // Fast path: names match exactly. + if content, ok := overlay[path]; ok { + return rc(content) + } + + // Slow path: check for same file under a different + // alias, perhaps due to a symbolic link. + for filename, content := range overlay { + if sameFile(path, filename) { + return rc(content) + } + } + + return OpenFile(orig, path) + } + return ctxt +} + +// ParseOverlayArchive parses an archive containing Go files and their +// contents. The result is intended to be used with OverlayContext. +// +// +// Archive format +// +// The archive consists of a series of files. Each file consists of a +// name, a decimal file size and the file contents, separated by +// newlines. No newline follows after the file contents. +func ParseOverlayArchive(archive io.Reader) (map[string][]byte, error) { + overlay := make(map[string][]byte) + r := bufio.NewReader(archive) + for { + // Read file name.
+ filename, err := r.ReadString('\n') + if err != nil { + if err == io.EOF { + break // OK + } + return nil, fmt.Errorf("reading archive file name: %v", err) + } + filename = filepath.Clean(strings.TrimSpace(filename)) + + // Read file size. + sz, err := r.ReadString('\n') + if err != nil { + return nil, fmt.Errorf("reading size of archive file %s: %v", filename, err) + } + sz = strings.TrimSpace(sz) + size, err := strconv.ParseUint(sz, 10, 32) + if err != nil { + return nil, fmt.Errorf("parsing size of archive file %s: %v", filename, err) + } + + // Read file content. + content := make([]byte, size) + if _, err := io.ReadFull(r, content); err != nil { + return nil, fmt.Errorf("reading archive file %s: %v", filename, err) + } + overlay[filename] = content + } + + return overlay, nil +} diff --git a/vendor/golang.org/x/tools/go/buildutil/tags.go b/vendor/golang.org/x/tools/go/buildutil/tags.go new file mode 100644 index 000000000..6da0ce484 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/tags.go @@ -0,0 +1,79 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package buildutil + +// This logic was copied from stringsFlag from $GOROOT/src/cmd/go/build.go. + +import "fmt" + +const TagsFlagDoc = "a list of `build tags` to consider satisfied during the build. " + + "For more information about build tags, see the description of " + + "build constraints in the documentation for the go/build package" + +// TagsFlag is an implementation of the flag.Value and flag.Getter interfaces that parses +// a flag value in the same manner as go build's -tags flag and +// populates a []string slice. +// +// See $GOROOT/src/go/build/doc.go for description of build tags. +// See $GOROOT/src/cmd/go/doc.go for description of 'go build -tags' flag. +// +// Example: +// flag.Var((*buildutil.TagsFlag)(&build.Default.BuildTags), "tags", buildutil.TagsFlagDoc) +type TagsFlag []string + +func (v *TagsFlag) Set(s string) error { + var err error + *v, err = splitQuotedFields(s) + if *v == nil { + *v = []string{} + } + return err +} + +func (v *TagsFlag) Get() interface{} { return *v } + +func splitQuotedFields(s string) ([]string, error) { + // Split fields allowing '' or "" around elements. + // Quotes further inside the string do not count. + var f []string + for len(s) > 0 { + for len(s) > 0 && isSpaceByte(s[0]) { + s = s[1:] + } + if len(s) == 0 { + break + } + // Accepted quoted string. No unescaping inside. + if s[0] == '"' || s[0] == '\'' { + quote := s[0] + s = s[1:] + i := 0 + for i < len(s) && s[i] != quote { + i++ + } + if i >= len(s) { + return nil, fmt.Errorf("unterminated %c string", quote) + } + f = append(f, s[:i]) + s = s[i+1:] + continue + } + i := 0 + for i < len(s) && !isSpaceByte(s[i]) { + i++ + } + f = append(f, s[:i]) + s = s[i:] + } + return f, nil +} + +func (v *TagsFlag) String() string { + return "" +} + +func isSpaceByte(c byte) bool { + return c == ' ' || c == '\t' || c == '\n' || c == '\r' +} diff --git a/vendor/golang.org/x/tools/go/buildutil/util.go b/vendor/golang.org/x/tools/go/buildutil/util.go new file mode 100644 index 000000000..fc923d7a7 --- /dev/null +++ b/vendor/golang.org/x/tools/go/buildutil/util.go @@ -0,0 +1,212 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package buildutil + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "io/ioutil" + "os" + "path" + "path/filepath" + "strings" +) + +// ParseFile behaves like parser.ParseFile, +// but uses the build context's file system interface, if any. +// +// If file is not absolute (as defined by IsAbsPath), the (dir, file) +// components are joined using JoinPath; dir must be absolute. +// +// The displayPath function, if provided, is used to transform the +// filename that will be attached to the ASTs. +// +// TODO(adonovan): call this from go/loader.parseFiles when the tree thaws. +// +func ParseFile(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, file string, mode parser.Mode) (*ast.File, error) { + if !IsAbsPath(ctxt, file) { + file = JoinPath(ctxt, dir, file) + } + rd, err := OpenFile(ctxt, file) + if err != nil { + return nil, err + } + defer rd.Close() // ignore error + if displayPath != nil { + file = displayPath(file) + } + return parser.ParseFile(fset, file, rd, mode) +} + +// ContainingPackage returns the package containing filename. +// +// If filename is not absolute, it is interpreted relative to working directory dir. +// All I/O is via the build context's file system interface, if any. +// +// The '...Files []string' fields of the resulting build.Package are not +// populated (build.FindOnly mode). +// +func ContainingPackage(ctxt *build.Context, dir, filename string) (*build.Package, error) { + if !IsAbsPath(ctxt, filename) { + filename = JoinPath(ctxt, dir, filename) + } + + // We must not assume the file tree uses + // "/" always, + // `\` always, + // or os.PathSeparator (which varies by platform), + // but to make any progress, we are forced to assume that + // paths will not use `\` unless the PathSeparator + // is also `\`, thus we can rely on filepath.ToSlash for some sanity. + + dirSlash := path.Dir(filepath.ToSlash(filename)) + "/" + + // We assume that no source root (GOPATH[i] or GOROOT) contains any other. + for _, srcdir := range ctxt.SrcDirs() { + srcdirSlash := filepath.ToSlash(srcdir) + "/" + if importPath, ok := HasSubdir(ctxt, srcdirSlash, dirSlash); ok { + return ctxt.Import(importPath, dir, build.FindOnly) + } + } + + return nil, fmt.Errorf("can't find package containing %s", filename) +} + +// -- Effective methods of file system interface ------------------------- + +// (go/build.Context defines these as methods, but does not export them.) + +// hasSubdir calls ctxt.HasSubdir (if not nil) or else uses +// the local file system to answer the question. +func HasSubdir(ctxt *build.Context, root, dir string) (rel string, ok bool) { + if f := ctxt.HasSubdir; f != nil { + return f(root, dir) + } + + // Try using paths we received. + if rel, ok = hasSubdir(root, dir); ok { + return + } + + // Try expanding symlinks and comparing + // expanded against unexpanded and + // expanded against expanded. 
+ rootSym, _ := filepath.EvalSymlinks(root) + dirSym, _ := filepath.EvalSymlinks(dir) + + if rel, ok = hasSubdir(rootSym, dir); ok { + return + } + if rel, ok = hasSubdir(root, dirSym); ok { + return + } + return hasSubdir(rootSym, dirSym) +} + +func hasSubdir(root, dir string) (rel string, ok bool) { + const sep = string(filepath.Separator) + root = filepath.Clean(root) + if !strings.HasSuffix(root, sep) { + root += sep + } + + dir = filepath.Clean(dir) + if !strings.HasPrefix(dir, root) { + return "", false + } + + return filepath.ToSlash(dir[len(root):]), true +} + +// FileExists returns true if the specified file exists, +// using the build context's file system interface. +func FileExists(ctxt *build.Context, path string) bool { + if ctxt.OpenFile != nil { + r, err := ctxt.OpenFile(path) + if err != nil { + return false + } + r.Close() // ignore error + return true + } + _, err := os.Stat(path) + return err == nil +} + +// OpenFile behaves like os.Open, +// but uses the build context's file system interface, if any. +func OpenFile(ctxt *build.Context, path string) (io.ReadCloser, error) { + if ctxt.OpenFile != nil { + return ctxt.OpenFile(path) + } + return os.Open(path) +} + +// IsAbsPath behaves like filepath.IsAbs, +// but uses the build context's file system interface, if any. +func IsAbsPath(ctxt *build.Context, path string) bool { + if ctxt.IsAbsPath != nil { + return ctxt.IsAbsPath(path) + } + return filepath.IsAbs(path) +} + +// JoinPath behaves like filepath.Join, +// but uses the build context's file system interface, if any. +func JoinPath(ctxt *build.Context, path ...string) string { + if ctxt.JoinPath != nil { + return ctxt.JoinPath(path...) + } + return filepath.Join(path...) +} + +// IsDir behaves like os.Stat plus IsDir, +// but uses the build context's file system interface, if any. +func IsDir(ctxt *build.Context, path string) bool { + if ctxt.IsDir != nil { + return ctxt.IsDir(path) + } + fi, err := os.Stat(path) + return err == nil && fi.IsDir() +} + +// ReadDir behaves like ioutil.ReadDir, +// but uses the build context's file system interface, if any. +func ReadDir(ctxt *build.Context, path string) ([]os.FileInfo, error) { + if ctxt.ReadDir != nil { + return ctxt.ReadDir(path) + } + return ioutil.ReadDir(path) +} + +// SplitPathList behaves like filepath.SplitList, +// but uses the build context's file system interface, if any. +func SplitPathList(ctxt *build.Context, s string) []string { + if ctxt.SplitPathList != nil { + return ctxt.SplitPathList(s) + } + return filepath.SplitList(s) +} + +// sameFile returns true if x and y have the same basename and denote +// the same file. +// +func sameFile(x, y string) bool { + if path.Clean(x) == path.Clean(y) { + return true + } + if filepath.Base(x) == filepath.Base(y) { // (optimisation) + if xi, err := os.Stat(x); err == nil { + if yi, err := os.Stat(y); err == nil { + return os.SameFile(xi, yi) + } + } + } + return false +} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go new file mode 100644 index 000000000..d01fb04a6 --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo.go @@ -0,0 +1,222 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package cgo handles cgo preprocessing of files containing `import "C"`. 
+// +// DESIGN +// +// The approach taken is to run the cgo processor on the package's +// CgoFiles and parse the output, faking the filenames of the +// resulting ASTs so that the synthetic file containing the C types is +// called "C" (e.g. "~/go/src/net/C") and the preprocessed files +// have their original names (e.g. "~/go/src/net/cgo_unix.go"), +// not the names of the actual temporary files. +// +// The advantage of this approach is its fidelity to 'go build'. The +// downside is that the token.Position.Offset for each AST node is +// incorrect, being an offset within the temporary file. Line numbers +// should still be correct because of the //line comments. +// +// The logic of this file is mostly plundered from the 'go build' +// tool, which also invokes the cgo preprocessor. +// +// +// REJECTED ALTERNATIVE +// +// An alternative approach that we explored is to extend go/types' +// Importer mechanism to provide the identity of the importing package +// so that each time `import "C"` appears it resolves to a different +// synthetic package containing just the objects needed in that case. +// The loader would invoke cgo but parse only the cgo_types.go file +// defining the package-level objects, discarding the other files +// resulting from preprocessing. +// +// The benefit of this approach would have been that source-level +// syntax information would correspond exactly to the original cgo +// file, with no preprocessing involved, making source tools like +// godoc, guru, and eg happy. However, the approach was rejected +// due to the additional complexity it would impose on go/types. (It +// made for a beautiful demo, though.) +// +// cgo files, despite their *.go extension, are not legal Go source +// files per the specification since they may refer to unexported +// members of package "C" such as C.int. Also, a function such as +// C.getpwent has in effect two types, one matching its C type and one +// which additionally returns (errno C.int). The cgo preprocessor +// uses name mangling to distinguish these two functions in the +// processed code, but go/types would need to duplicate this logic in +// its handling of function calls, analogous to the treatment of map +// lookups in which y=m[k] and y,ok=m[k] are both legal. + +package cgo + +import ( + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "io/ioutil" + "log" + "os" + "path/filepath" + "regexp" + "strings" + + exec "golang.org/x/sys/execabs" +) + +// ProcessFiles invokes the cgo preprocessor on bp.CgoFiles, parses +// the output and returns the resulting ASTs. 
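+//
+// A sketch of a typical call (DisplayPath may be nil; mode is any
+// parser.Mode such as parser.ParseComments):
+//
+//	files, err := ProcessFiles(bp, fset, nil, parser.ParseComments)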
+// +func ProcessFiles(bp *build.Package, fset *token.FileSet, DisplayPath func(path string) string, mode parser.Mode) ([]*ast.File, error) { + tmpdir, err := ioutil.TempDir("", strings.Replace(bp.ImportPath, "/", "_", -1)+"_C") + if err != nil { + return nil, err + } + defer os.RemoveAll(tmpdir) + + pkgdir := bp.Dir + if DisplayPath != nil { + pkgdir = DisplayPath(pkgdir) + } + + cgoFiles, cgoDisplayFiles, err := Run(bp, pkgdir, tmpdir, false) + if err != nil { + return nil, err + } + var files []*ast.File + for i := range cgoFiles { + rd, err := os.Open(cgoFiles[i]) + if err != nil { + return nil, err + } + display := filepath.Join(bp.Dir, cgoDisplayFiles[i]) + f, err := parser.ParseFile(fset, display, rd, mode) + rd.Close() + if err != nil { + return nil, err + } + files = append(files, f) + } + return files, nil +} + +var cgoRe = regexp.MustCompile(`[/\\:]`) + +// Run invokes the cgo preprocessor on bp.CgoFiles and returns two +// lists of files: the resulting processed files (in temporary +// directory tmpdir) and the corresponding names of the unprocessed files. +// +// Run is adapted from (*builder).cgo in +// $GOROOT/src/cmd/go/build.go, but these features are unsupported: +// Objective C, CGOPKGPATH, CGO_FLAGS. +// +// If useabs is set to true, absolute paths of the bp.CgoFiles will be passed in +// to the cgo preprocessor. This in turn will set the // line comments +// referring to those files to use absolute paths. This is needed for +// go/packages using the legacy go list support so it is able to find +// the original files. +func Run(bp *build.Package, pkgdir, tmpdir string, useabs bool) (files, displayFiles []string, err error) { + cgoCPPFLAGS, _, _, _ := cflags(bp, true) + _, cgoexeCFLAGS, _, _ := cflags(bp, false) + + if len(bp.CgoPkgConfig) > 0 { + pcCFLAGS, err := pkgConfigFlags(bp) + if err != nil { + return nil, nil, err + } + cgoCPPFLAGS = append(cgoCPPFLAGS, pcCFLAGS...) + } + + // Allows including _cgo_export.h from .[ch] files in the package. + cgoCPPFLAGS = append(cgoCPPFLAGS, "-I", tmpdir) + + // _cgo_gotypes.go (displayed "C") contains the type definitions. + files = append(files, filepath.Join(tmpdir, "_cgo_gotypes.go")) + displayFiles = append(displayFiles, "C") + for _, fn := range bp.CgoFiles { + // "foo.cgo1.go" (displayed "foo.go") is the processed Go source. + f := cgoRe.ReplaceAllString(fn[:len(fn)-len("go")], "_") + files = append(files, filepath.Join(tmpdir, f+"cgo1.go")) + displayFiles = append(displayFiles, fn) + } + + var cgoflags []string + if bp.Goroot && bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_runtime_cgo=false") + } + if bp.Goroot && bp.ImportPath == "runtime/race" || bp.ImportPath == "runtime/cgo" { + cgoflags = append(cgoflags, "-import_syscall=false") + } + + var cgoFiles []string = bp.CgoFiles + if useabs { + cgoFiles = make([]string, len(bp.CgoFiles)) + for i := range cgoFiles { + cgoFiles[i] = filepath.Join(pkgdir, bp.CgoFiles[i]) + } + } + + args := stringList( + "go", "tool", "cgo", "-objdir", tmpdir, cgoflags, "--", + cgoCPPFLAGS, cgoexeCFLAGS, cgoFiles, + ) + if false { + log.Printf("Running cgo for package %q: %s (dir=%s)", bp.ImportPath, args, pkgdir) + } + cmd := exec.Command(args[0], args[1:]...) 
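+	// The tool is run from the package directory (PWD is set to match) and
+	// its stdout is redirected to stderr; the caller reads only the files
+	// generated in tmpdir.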
+ cmd.Dir = pkgdir + cmd.Env = append(os.Environ(), "PWD="+pkgdir) + cmd.Stdout = os.Stderr + cmd.Stderr = os.Stderr + if err := cmd.Run(); err != nil { + return nil, nil, fmt.Errorf("cgo failed: %s: %s", args, err) + } + + return files, displayFiles, nil +} + +// -- unmodified from 'go build' --------------------------------------- + +// Return the flags to use when invoking the C or C++ compilers, or cgo. +func cflags(p *build.Package, def bool) (cppflags, cflags, cxxflags, ldflags []string) { + var defaults string + if def { + defaults = "-g -O2" + } + + cppflags = stringList(envList("CGO_CPPFLAGS", ""), p.CgoCPPFLAGS) + cflags = stringList(envList("CGO_CFLAGS", defaults), p.CgoCFLAGS) + cxxflags = stringList(envList("CGO_CXXFLAGS", defaults), p.CgoCXXFLAGS) + ldflags = stringList(envList("CGO_LDFLAGS", defaults), p.CgoLDFLAGS) + return +} + +// envList returns the value of the given environment variable broken +// into fields, using the default value when the variable is empty. +func envList(key, def string) []string { + v := os.Getenv(key) + if v == "" { + v = def + } + return strings.Fields(v) +} + +// stringList's arguments should be a sequence of string or []string values. +// stringList flattens them into a single []string. +func stringList(args ...interface{}) []string { + var x []string + for _, arg := range args { + switch arg := arg.(type) { + case []string: + x = append(x, arg...) + case string: + x = append(x, arg) + default: + panic("stringList: invalid argument") + } + } + return x +} diff --git a/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go new file mode 100644 index 000000000..7d94bbc1e --- /dev/null +++ b/vendor/golang.org/x/tools/go/internal/cgo/cgo_pkgconfig.go @@ -0,0 +1,39 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package cgo + +import ( + "errors" + "fmt" + "go/build" + exec "golang.org/x/sys/execabs" + "strings" +) + +// pkgConfig runs pkg-config with the specified arguments and returns the flags it prints. +func pkgConfig(mode string, pkgs []string) (flags []string, err error) { + cmd := exec.Command("pkg-config", append([]string{mode}, pkgs...)...) + out, err := cmd.CombinedOutput() + if err != nil { + s := fmt.Sprintf("%s failed: %v", strings.Join(cmd.Args, " "), err) + if len(out) > 0 { + s = fmt.Sprintf("%s: %s", s, out) + } + return nil, errors.New(s) + } + if len(out) > 0 { + flags = strings.Fields(string(out)) + } + return +} + +// pkgConfigFlags calls pkg-config if needed and returns the cflags +// needed to build the package. +func pkgConfigFlags(p *build.Package) (cflags []string, err error) { + if len(p.CgoPkgConfig) == 0 { + return nil, nil + } + return pkgConfig("--cflags", p.CgoPkgConfig) +} diff --git a/vendor/golang.org/x/tools/go/loader/doc.go b/vendor/golang.org/x/tools/go/loader/doc.go new file mode 100644 index 000000000..c5aa31c1a --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/doc.go @@ -0,0 +1,204 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package loader loads a complete Go program from source code, parsing +// and type-checking the initial packages plus their transitive closure +// of dependencies. The ASTs and the derived facts are retained for +// later use. 
+// +// Deprecated: This is an older API and does not have support +// for modules. Use golang.org/x/tools/go/packages instead. +// +// The package defines two primary types: Config, which specifies a +// set of initial packages to load and various other options; and +// Program, which is the result of successfully loading the packages +// specified by a configuration. +// +// The configuration can be set directly, but *Config provides various +// convenience methods to simplify the common cases, each of which can +// be called any number of times. Finally, these are followed by a +// call to Load() to actually load and type-check the program. +// +// var conf loader.Config +// +// // Use the command-line arguments to specify +// // a set of initial packages to load from source. +// // See FromArgsUsage for help. +// rest, err := conf.FromArgs(os.Args[1:], wantTests) +// +// // Parse the specified files and create an ad hoc package with path "foo". +// // All files must have the same 'package' declaration. +// conf.CreateFromFilenames("foo", "foo.go", "bar.go") +// +// // Create an ad hoc package with path "foo" from +// // the specified already-parsed files. +// // All ASTs must have the same 'package' declaration. +// conf.CreateFromFiles("foo", parsedFiles) +// +// // Add "runtime" to the set of packages to be loaded. +// conf.Import("runtime") +// +// // Adds "fmt" and "fmt_test" to the set of packages +// // to be loaded. "fmt" will include *_test.go files. +// conf.ImportWithTests("fmt") +// +// // Finally, load all the packages specified by the configuration. +// prog, err := conf.Load() +// +// See examples_test.go for examples of API usage. +// +// +// CONCEPTS AND TERMINOLOGY +// +// The WORKSPACE is the set of packages accessible to the loader. The +// workspace is defined by Config.Build, a *build.Context. The +// default context treats subdirectories of $GOROOT and $GOPATH as +// packages, but this behavior may be overridden. +// +// An AD HOC package is one specified as a set of source files on the +// command line. In the simplest case, it may consist of a single file +// such as $GOROOT/src/net/http/triv.go. +// +// EXTERNAL TEST packages are those comprised of a set of *_test.go +// files all with the same 'package foo_test' declaration, all in the +// same directory. (go/build.Package calls these files XTestFiles.) +// +// An IMPORTABLE package is one that can be referred to by some import +// spec. Every importable package is uniquely identified by its +// PACKAGE PATH or just PATH, a string such as "fmt", "encoding/json", +// or "cmd/vendor/golang.org/x/arch/x86/x86asm". A package path +// typically denotes a subdirectory of the workspace. +// +// An import declaration uses an IMPORT PATH to refer to a package. +// Most import declarations use the package path as the import path. +// +// Due to VENDORING (https://golang.org/s/go15vendor), the +// interpretation of an import path may depend on the directory in which +// it appears. To resolve an import path to a package path, go/build +// must search the enclosing directories for a subdirectory named +// "vendor". +// +// ad hoc packages and external test packages are NON-IMPORTABLE. The +// path of an ad hoc package is inferred from the package +// declarations of its files and is therefore not a unique package key. +// For example, Config.CreatePkgs may specify two initial ad hoc +// packages, both with path "main". 
+// +// An AUGMENTED package is an importable package P plus all the +// *_test.go files with same 'package foo' declaration as P. +// (go/build.Package calls these files TestFiles.) +// +// The INITIAL packages are those specified in the configuration. A +// DEPENDENCY is a package loaded to satisfy an import in an initial +// package or another dependency. +// +package loader + +// IMPLEMENTATION NOTES +// +// 'go test', in-package test files, and import cycles +// --------------------------------------------------- +// +// An external test package may depend upon members of the augmented +// package that are not in the unaugmented package, such as functions +// that expose internals. (See bufio/export_test.go for an example.) +// So, the loader must ensure that for each external test package +// it loads, it also augments the corresponding non-test package. +// +// The import graph over n unaugmented packages must be acyclic; the +// import graph over n-1 unaugmented packages plus one augmented +// package must also be acyclic. ('go test' relies on this.) But the +// import graph over n augmented packages may contain cycles. +// +// First, all the (unaugmented) non-test packages and their +// dependencies are imported in the usual way; the loader reports an +// error if it detects an import cycle. +// +// Then, each package P for which testing is desired is augmented by +// the list P' of its in-package test files, by calling +// (*types.Checker).Files. This arrangement ensures that P' may +// reference definitions within P, but P may not reference definitions +// within P'. Furthermore, P' may import any other package, including +// ones that depend upon P, without an import cycle error. +// +// Consider two packages A and B, both of which have lists of +// in-package test files we'll call A' and B', and which have the +// following import graph edges: +// B imports A +// B' imports A +// A' imports B +// This last edge would be expected to create an error were it not +// for the special type-checking discipline above. +// Cycles of size greater than two are possible. For example: +// compress/bzip2/bzip2_test.go (package bzip2) imports "io/ioutil" +// io/ioutil/tempfile_test.go (package ioutil) imports "regexp" +// regexp/exec_test.go (package regexp) imports "compress/bzip2" +// +// +// Concurrency +// ----------- +// +// Let us define the import dependency graph as follows. Each node is a +// list of files passed to (Checker).Files at once. Many of these lists +// are the production code of an importable Go package, so those nodes +// are labelled by the package's path. The remaining nodes are +// ad hoc packages and lists of in-package *_test.go files that augment +// an importable package; those nodes have no label. +// +// The edges of the graph represent import statements appearing within a +// file. An edge connects a node (a list of files) to the node it +// imports, which is importable and thus always labelled. +// +// Loading is controlled by this dependency graph. +// +// To reduce I/O latency, we start loading a package's dependencies +// asynchronously as soon as we've parsed its files and enumerated its +// imports (scanImports). This performs a preorder traversal of the +// import dependency graph. +// +// To exploit hardware parallelism, we type-check unrelated packages in +// parallel, where "unrelated" means not ordered by the partial order of +// the import dependency graph. 
+// +// We use a concurrency-safe non-blocking cache (importer.imported) to +// record the results of type-checking, whether success or failure. An +// entry is created in this cache by startLoad the first time the +// package is imported. The first goroutine to request an entry becomes +// responsible for completing the task and broadcasting completion to +// subsequent requestors, which block until then. +// +// Type checking occurs in (parallel) postorder: we cannot type-check a +// set of files until we have loaded and type-checked all of their +// immediate dependencies (and thus all of their transitive +// dependencies). If the input were guaranteed free of import cycles, +// this would be trivial: we could simply wait for completion of the +// dependencies and then invoke the typechecker. +// +// But as we saw in the 'go test' section above, some cycles in the +// import graph over packages are actually legal, so long as the +// cycle-forming edge originates in the in-package test files that +// augment the package. This explains why the nodes of the import +// dependency graph are not packages, but lists of files: the unlabelled +// nodes avoid the cycles. Consider packages A and B where B imports A +// and A's in-package tests AT import B. The naively constructed import +// graph over packages would contain a cycle (A+AT) --> B --> (A+AT) but +// the graph over lists of files is AT --> B --> A, where AT is an +// unlabelled node. +// +// Awaiting completion of the dependencies in a cyclic graph would +// deadlock, so we must materialize the import dependency graph (as +// importer.graph) and check whether each import edge forms a cycle. If +// x imports y, and the graph already contains a path from y to x, then +// there is an import cycle, in which case the processing of x must not +// wait for the completion of processing of y. +// +// When the type-checker makes a callback (doImport) to the loader for a +// given import edge, there are two possible cases. In the normal case, +// the dependency has already been completely type-checked; doImport +// does a cache lookup and returns it. In the cyclic case, the entry in +// the cache is still necessarily incomplete, indicating a cycle. We +// perform the cycle check again to obtain the error message, and return +// the error. +// +// The result of using concurrency is about a 2.5x speedup for stdlib_test. diff --git a/vendor/golang.org/x/tools/go/loader/loader.go b/vendor/golang.org/x/tools/go/loader/loader.go new file mode 100644 index 000000000..508a1fd01 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/loader.go @@ -0,0 +1,1078 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +// See doc.go for package documentation and implementation notes. + +import ( + "errors" + "fmt" + "go/ast" + "go/build" + "go/parser" + "go/token" + "go/types" + "os" + "path/filepath" + "sort" + "strings" + "sync" + "time" + + "golang.org/x/tools/go/ast/astutil" + "golang.org/x/tools/go/internal/cgo" +) + +var ignoreVendor build.ImportMode + +const trace = false // show timing info for type-checking + +// Config specifies the configuration for loading a whole program from +// Go source code. +// The zero value for Config is a ready-to-use default configuration. +type Config struct { + // Fset is the file set for the parser to use when loading the + // program. 
If nil, it may be lazily initialized by any + // method of Config. + Fset *token.FileSet + + // ParserMode specifies the mode to be used by the parser when + // loading source packages. + ParserMode parser.Mode + + // TypeChecker contains options relating to the type checker. + // + // The supplied IgnoreFuncBodies is not used; the effective + // value comes from the TypeCheckFuncBodies func below. + // The supplied Import function is not used either. + TypeChecker types.Config + + // TypeCheckFuncBodies is a predicate over package paths. + // A package for which the predicate is false will + // have its package-level declarations type checked, but not + // its function bodies; this can be used to quickly load + // dependencies from source. If nil, all func bodies are type + // checked. + TypeCheckFuncBodies func(path string) bool + + // If Build is non-nil, it is used to locate source packages. + // Otherwise &build.Default is used. + // + // By default, cgo is invoked to preprocess Go files that + // import the fake package "C". This behaviour can be + // disabled by setting CGO_ENABLED=0 in the environment prior + // to startup, or by setting Build.CgoEnabled=false. + Build *build.Context + + // The current directory, used for resolving relative package + // references such as "./go/loader". If empty, os.Getwd will be + // used instead. + Cwd string + + // If DisplayPath is non-nil, it is used to transform each + // file name obtained from Build.Import(). This can be used + // to prevent a virtualized build.Config's file names from + // leaking into the user interface. + DisplayPath func(path string) string + + // If AllowErrors is true, Load will return a Program even + // if some of the its packages contained I/O, parser or type + // errors; such errors are accessible via PackageInfo.Errors. If + // false, Load will fail if any package had an error. + AllowErrors bool + + // CreatePkgs specifies a list of non-importable initial + // packages to create. The resulting packages will appear in + // the corresponding elements of the Program.Created slice. + CreatePkgs []PkgSpec + + // ImportPkgs specifies a set of initial packages to load. + // The map keys are package paths. + // + // The map value indicates whether to load tests. If true, Load + // will add and type-check two lists of files to the package: + // non-test files followed by in-package *_test.go files. In + // addition, it will append the external test package (if any) + // to Program.Created. + ImportPkgs map[string]bool + + // FindPackage is called during Load to create the build.Package + // for a given import path from a given directory. + // If FindPackage is nil, (*build.Context).Import is used. + // A client may use this hook to adapt to a proprietary build + // system that does not follow the "go build" layout + // conventions, for example. + // + // It must be safe to call concurrently from multiple goroutines. + FindPackage func(ctxt *build.Context, importPath, fromDir string, mode build.ImportMode) (*build.Package, error) + + // AfterTypeCheck is called immediately after a list of files + // has been type-checked and appended to info.Files. + // + // This optional hook function is the earliest opportunity for + // the client to observe the output of the type checker, + // which may be useful to reduce analysis latency when loading + // a large program. 
+ // + // The function is permitted to modify info.Info, for instance + // to clear data structures that are no longer needed, which can + // dramatically reduce peak memory consumption. + // + // The function may be called twice for the same PackageInfo: + // once for the files of the package and again for the + // in-package test files. + // + // It must be safe to call concurrently from multiple goroutines. + AfterTypeCheck func(info *PackageInfo, files []*ast.File) +} + +// A PkgSpec specifies a non-importable package to be created by Load. +// Files are processed first, but typically only one of Files and +// Filenames is provided. The path needn't be globally unique. +// +// For vendoring purposes, the package's directory is the one that +// contains the first file. +type PkgSpec struct { + Path string // package path ("" => use package declaration) + Files []*ast.File // ASTs of already-parsed files + Filenames []string // names of files to be parsed +} + +// A Program is a Go program loaded from source as specified by a Config. +type Program struct { + Fset *token.FileSet // the file set for this program + + // Created[i] contains the initial package whose ASTs or + // filenames were supplied by Config.CreatePkgs[i], followed by + // the external test package, if any, of each package in + // Config.ImportPkgs ordered by ImportPath. + // + // NOTE: these files must not import "C". Cgo preprocessing is + // only performed on imported packages, not ad hoc packages. + // + // TODO(adonovan): we need to copy and adapt the logic of + // goFilesPackage (from $GOROOT/src/cmd/go/build.go) and make + // Config.Import and Config.Create methods return the same kind + // of entity, essentially a build.Package. + // Perhaps we can even reuse that type directly. + Created []*PackageInfo + + // Imported contains the initially imported packages, + // as specified by Config.ImportPkgs. + Imported map[string]*PackageInfo + + // AllPackages contains the PackageInfo of every package + // encountered by Load: all initial packages and all + // dependencies, including incomplete ones. + AllPackages map[*types.Package]*PackageInfo + + // importMap is the canonical mapping of package paths to + // packages. It contains all Imported initial packages, but not + // Created ones, and all imported dependencies. + importMap map[string]*types.Package +} + +// PackageInfo holds the ASTs and facts derived by the type-checker +// for a single package. +// +// Not mutated once exposed via the API. +// +type PackageInfo struct { + Pkg *types.Package + Importable bool // true if 'import "Pkg.Path()"' would resolve to this + TransitivelyErrorFree bool // true if Pkg and all its dependencies are free of errors + Files []*ast.File // syntax trees for the package's files + Errors []error // non-nil if the package had errors + types.Info // type-checker deductions. + dir string // package directory + + checker *types.Checker // transient type-checker state + errorFunc func(error) +} + +func (info *PackageInfo) String() string { return info.Pkg.Path() } + +func (info *PackageInfo) appendError(err error) { + if info.errorFunc != nil { + info.errorFunc(err) + } else { + fmt.Fprintln(os.Stderr, err) + } + info.Errors = append(info.Errors, err) +} + +func (conf *Config) fset() *token.FileSet { + if conf.Fset == nil { + conf.Fset = token.NewFileSet() + } + return conf.Fset +} + +// ParseFile is a convenience function (intended for testing) that invokes +// the parser using the Config's FileSet, which is initialized if nil. 
+// +// src specifies the parser input as a string, []byte, or io.Reader, and +// filename is its apparent name. If src is nil, the contents of +// filename are read from the file system. +// +func (conf *Config) ParseFile(filename string, src interface{}) (*ast.File, error) { + // TODO(adonovan): use conf.build() etc like parseFiles does. + return parser.ParseFile(conf.fset(), filename, src, conf.ParserMode) +} + +// FromArgsUsage is a partial usage message that applications calling +// FromArgs may wish to include in their -help output. +const FromArgsUsage = ` + is a list of arguments denoting a set of initial packages. +It may take one of two forms: + +1. A list of *.go source files. + + All of the specified files are loaded, parsed and type-checked + as a single package. All the files must belong to the same directory. + +2. A list of import paths, each denoting a package. + + The package's directory is found relative to the $GOROOT and + $GOPATH using similar logic to 'go build', and the *.go files in + that directory are loaded, parsed and type-checked as a single + package. + + In addition, all *_test.go files in the directory are then loaded + and parsed. Those files whose package declaration equals that of + the non-*_test.go files are included in the primary package. Test + files whose package declaration ends with "_test" are type-checked + as another package, the 'external' test package, so that a single + import path may denote two packages. (Whether this behaviour is + enabled is tool-specific, and may depend on additional flags.) + +A '--' argument terminates the list of packages. +` + +// FromArgs interprets args as a set of initial packages to load from +// source and updates the configuration. It returns the list of +// unconsumed arguments. +// +// It is intended for use in command-line interfaces that require a +// set of initial packages to be specified; see FromArgsUsage message +// for details. +// +// Only superficial errors are reported at this stage; errors dependent +// on I/O are detected during Load. +// +func (conf *Config) FromArgs(args []string, xtest bool) ([]string, error) { + var rest []string + for i, arg := range args { + if arg == "--" { + rest = args[i+1:] + args = args[:i] + break // consume "--" and return the remaining args + } + } + + if len(args) > 0 && strings.HasSuffix(args[0], ".go") { + // Assume args is a list of a *.go files + // denoting a single ad hoc package. + for _, arg := range args { + if !strings.HasSuffix(arg, ".go") { + return nil, fmt.Errorf("named files must be .go files: %s", arg) + } + } + conf.CreateFromFilenames("", args...) + } else { + // Assume args are directories each denoting a + // package and (perhaps) an external test, iff xtest. + for _, arg := range args { + if xtest { + conf.ImportWithTests(arg) + } else { + conf.Import(arg) + } + } + } + + return rest, nil +} + +// CreateFromFilenames is a convenience function that adds +// a conf.CreatePkgs entry to create a package of the specified *.go +// files. +// +func (conf *Config) CreateFromFilenames(path string, filenames ...string) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Filenames: filenames}) +} + +// CreateFromFiles is a convenience function that adds a conf.CreatePkgs +// entry to create package of the specified path and parsed files. 
+// +func (conf *Config) CreateFromFiles(path string, files ...*ast.File) { + conf.CreatePkgs = append(conf.CreatePkgs, PkgSpec{Path: path, Files: files}) +} + +// ImportWithTests is a convenience function that adds path to +// ImportPkgs, the set of initial source packages located relative to +// $GOPATH. The package will be augmented by any *_test.go files in +// its directory that contain a "package x" (not "package x_test") +// declaration. +// +// In addition, if any *_test.go files contain a "package x_test" +// declaration, an additional package comprising just those files will +// be added to CreatePkgs. +// +func (conf *Config) ImportWithTests(path string) { conf.addImport(path, true) } + +// Import is a convenience function that adds path to ImportPkgs, the +// set of initial packages that will be imported from source. +// +func (conf *Config) Import(path string) { conf.addImport(path, false) } + +func (conf *Config) addImport(path string, tests bool) { + if path == "C" { + return // ignore; not a real package + } + if conf.ImportPkgs == nil { + conf.ImportPkgs = make(map[string]bool) + } + conf.ImportPkgs[path] = conf.ImportPkgs[path] || tests +} + +// PathEnclosingInterval returns the PackageInfo and ast.Node that +// contain source interval [start, end), and all the node's ancestors +// up to the AST root. It searches all ast.Files of all packages in prog. +// exact is defined as for astutil.PathEnclosingInterval. +// +// The zero value is returned if not found. +// +func (prog *Program) PathEnclosingInterval(start, end token.Pos) (pkg *PackageInfo, path []ast.Node, exact bool) { + for _, info := range prog.AllPackages { + for _, f := range info.Files { + if f.Pos() == token.NoPos { + // This can happen if the parser saw + // too many errors and bailed out. + // (Use parser.AllErrors to prevent that.) + continue + } + if !tokenFileContainsPos(prog.Fset.File(f.Pos()), start) { + continue + } + if path, exact := astutil.PathEnclosingInterval(f, start, end); path != nil { + return info, path, exact + } + } + } + return nil, nil, false +} + +// InitialPackages returns a new slice containing the set of initial +// packages (Created + Imported) in unspecified order. +// +func (prog *Program) InitialPackages() []*PackageInfo { + infos := make([]*PackageInfo, 0, len(prog.Created)+len(prog.Imported)) + infos = append(infos, prog.Created...) + for _, info := range prog.Imported { + infos = append(infos, info) + } + return infos +} + +// Package returns the ASTs and results of type checking for the +// specified package. +func (prog *Program) Package(path string) *PackageInfo { + if info, ok := prog.AllPackages[prog.importMap[path]]; ok { + return info + } + for _, info := range prog.Created { + if path == info.Pkg.Path() { + return info + } + } + return nil +} + +// ---------- Implementation ---------- + +// importer holds the working state of the algorithm. +type importer struct { + conf *Config // the client configuration + start time.Time // for logging + + progMu sync.Mutex // guards prog + prog *Program // the resulting program + + // findpkg is a memoization of FindPackage. + findpkgMu sync.Mutex // guards findpkg + findpkg map[findpkgKey]*findpkgValue + + importedMu sync.Mutex // guards imported + imported map[string]*importInfo // all imported packages (incl. 
failures) by import path + + // import dependency graph: graph[x][y] => x imports y + // + // Since non-importable packages cannot be cyclic, we ignore + // their imports, thus we only need the subgraph over importable + // packages. Nodes are identified by their import paths. + graphMu sync.Mutex + graph map[string]map[string]bool +} + +type findpkgKey struct { + importPath string + fromDir string + mode build.ImportMode +} + +type findpkgValue struct { + ready chan struct{} // closed to broadcast readiness + bp *build.Package + err error +} + +// importInfo tracks the success or failure of a single import. +// +// Upon completion, exactly one of info and err is non-nil: +// info on successful creation of a package, err otherwise. +// A successful package may still contain type errors. +// +type importInfo struct { + path string // import path + info *PackageInfo // results of typechecking (including errors) + complete chan struct{} // closed to broadcast that info is set. +} + +// awaitCompletion blocks until ii is complete, +// i.e. the info field is safe to inspect. +func (ii *importInfo) awaitCompletion() { + <-ii.complete // wait for close +} + +// Complete marks ii as complete. +// Its info and err fields will not be subsequently updated. +func (ii *importInfo) Complete(info *PackageInfo) { + if info == nil { + panic("info == nil") + } + ii.info = info + close(ii.complete) +} + +type importError struct { + path string // import path + err error // reason for failure to create a package +} + +// Load creates the initial packages specified by conf.{Create,Import}Pkgs, +// loading their dependencies packages as needed. +// +// On success, Load returns a Program containing a PackageInfo for +// each package. On failure, it returns an error. +// +// If AllowErrors is true, Load will return a Program even if some +// packages contained I/O, parser or type errors, or if dependencies +// were missing. (Such errors are accessible via PackageInfo.Errors. If +// false, Load will fail if any package had an error. +// +// It is an error if no packages were loaded. +// +func (conf *Config) Load() (*Program, error) { + // Create a simple default error handler for parse/type errors. + if conf.TypeChecker.Error == nil { + conf.TypeChecker.Error = func(e error) { fmt.Fprintln(os.Stderr, e) } + } + + // Set default working directory for relative package references. + if conf.Cwd == "" { + var err error + conf.Cwd, err = os.Getwd() + if err != nil { + return nil, err + } + } + + // Install default FindPackage hook using go/build logic. + if conf.FindPackage == nil { + conf.FindPackage = (*build.Context).Import + } + + prog := &Program{ + Fset: conf.fset(), + Imported: make(map[string]*PackageInfo), + importMap: make(map[string]*types.Package), + AllPackages: make(map[*types.Package]*PackageInfo), + } + + imp := importer{ + conf: conf, + prog: prog, + findpkg: make(map[findpkgKey]*findpkgValue), + imported: make(map[string]*importInfo), + start: time.Now(), + graph: make(map[string]map[string]bool), + } + + // -- loading proper (concurrent phase) -------------------------------- + + var errpkgs []string // packages that contained errors + + // Load the initially imported packages and their dependencies, + // in parallel. + // No vendor check on packages imported from the command line. 
+ infos, importErrors := imp.importAll("", conf.Cwd, conf.ImportPkgs, ignoreVendor) + for _, ie := range importErrors { + conf.TypeChecker.Error(ie.err) // failed to create package + errpkgs = append(errpkgs, ie.path) + } + for _, info := range infos { + prog.Imported[info.Pkg.Path()] = info + } + + // Augment the designated initial packages by their tests. + // Dependencies are loaded in parallel. + var xtestPkgs []*build.Package + for importPath, augment := range conf.ImportPkgs { + if !augment { + continue + } + + // No vendor check on packages imported from command line. + bp, err := imp.findPackage(importPath, conf.Cwd, ignoreVendor) + if err != nil { + // Package not found, or can't even parse package declaration. + // Already reported by previous loop; ignore it. + continue + } + + // Needs external test package? + if len(bp.XTestGoFiles) > 0 { + xtestPkgs = append(xtestPkgs, bp) + } + + // Consult the cache using the canonical package path. + path := bp.ImportPath + imp.importedMu.Lock() // (unnecessary, we're sequential here) + ii, ok := imp.imported[path] + // Paranoid checks added due to issue #11012. + if !ok { + // Unreachable. + // The previous loop called importAll and thus + // startLoad for each path in ImportPkgs, which + // populates imp.imported[path] with a non-zero value. + panic(fmt.Sprintf("imported[%q] not found", path)) + } + if ii == nil { + // Unreachable. + // The ii values in this loop are the same as in + // the previous loop, which enforced the invariant + // that at least one of ii.err and ii.info is non-nil. + panic(fmt.Sprintf("imported[%q] == nil", path)) + } + if ii.info == nil { + // Unreachable. + // awaitCompletion has the postcondition + // ii.info != nil. + panic(fmt.Sprintf("imported[%q].info = nil", path)) + } + info := ii.info + imp.importedMu.Unlock() + + // Parse the in-package test files. + files, errs := imp.conf.parsePackageFiles(bp, 't') + for _, err := range errs { + info.appendError(err) + } + + // The test files augmenting package P cannot be imported, + // but may import packages that import P, + // so we must disable the cycle check. + imp.addFiles(info, files, false) + } + + createPkg := func(path, dir string, files []*ast.File, errs []error) { + info := imp.newPackageInfo(path, dir) + for _, err := range errs { + info.appendError(err) + } + + // Ad hoc packages are non-importable, + // so no cycle check is needed. + // addFiles loads dependencies in parallel. + imp.addFiles(info, files, false) + prog.Created = append(prog.Created, info) + } + + // Create packages specified by conf.CreatePkgs. + for _, cp := range conf.CreatePkgs { + files, errs := parseFiles(conf.fset(), conf.build(), nil, conf.Cwd, cp.Filenames, conf.ParserMode) + files = append(files, cp.Files...) + + path := cp.Path + if path == "" { + if len(files) > 0 { + path = files[0].Name.Name + } else { + path = "(unnamed)" + } + } + + dir := conf.Cwd + if len(files) > 0 && files[0].Pos().IsValid() { + dir = filepath.Dir(conf.fset().File(files[0].Pos()).Name()) + } + createPkg(path, dir, files, errs) + } + + // Create external test packages. + sort.Sort(byImportPath(xtestPkgs)) + for _, bp := range xtestPkgs { + files, errs := imp.conf.parsePackageFiles(bp, 'x') + createPkg(bp.ImportPath+"_test", bp.Dir, files, errs) + } + + // -- finishing up (sequential) ---------------------------------------- + + if len(prog.Imported)+len(prog.Created) == 0 { + return nil, errors.New("no initial packages were loaded") + } + + // Create infos for indirectly imported packages. + // e.g. 
incomplete packages without syntax, loaded from export data. + for _, obj := range prog.importMap { + info := prog.AllPackages[obj] + if info == nil { + prog.AllPackages[obj] = &PackageInfo{Pkg: obj, Importable: true} + } else { + // finished + info.checker = nil + info.errorFunc = nil + } + } + + if !conf.AllowErrors { + // Report errors in indirectly imported packages. + for _, info := range prog.AllPackages { + if len(info.Errors) > 0 { + errpkgs = append(errpkgs, info.Pkg.Path()) + } + } + if errpkgs != nil { + var more string + if len(errpkgs) > 3 { + more = fmt.Sprintf(" and %d more", len(errpkgs)-3) + errpkgs = errpkgs[:3] + } + return nil, fmt.Errorf("couldn't load packages due to errors: %s%s", + strings.Join(errpkgs, ", "), more) + } + } + + markErrorFreePackages(prog.AllPackages) + + return prog, nil +} + +type byImportPath []*build.Package + +func (b byImportPath) Len() int { return len(b) } +func (b byImportPath) Less(i, j int) bool { return b[i].ImportPath < b[j].ImportPath } +func (b byImportPath) Swap(i, j int) { b[i], b[j] = b[j], b[i] } + +// markErrorFreePackages sets the TransitivelyErrorFree flag on all +// applicable packages. +func markErrorFreePackages(allPackages map[*types.Package]*PackageInfo) { + // Build the transpose of the import graph. + importedBy := make(map[*types.Package]map[*types.Package]bool) + for P := range allPackages { + for _, Q := range P.Imports() { + clients, ok := importedBy[Q] + if !ok { + clients = make(map[*types.Package]bool) + importedBy[Q] = clients + } + clients[P] = true + } + } + + // Find all packages reachable from some error package. + reachable := make(map[*types.Package]bool) + var visit func(*types.Package) + visit = func(p *types.Package) { + if !reachable[p] { + reachable[p] = true + for q := range importedBy[p] { + visit(q) + } + } + } + for _, info := range allPackages { + if len(info.Errors) > 0 { + visit(info.Pkg) + } + } + + // Mark the others as "transitively error-free". + for _, info := range allPackages { + if !reachable[info.Pkg] { + info.TransitivelyErrorFree = true + } + } +} + +// build returns the effective build context. +func (conf *Config) build() *build.Context { + if conf.Build != nil { + return conf.Build + } + return &build.Default +} + +// parsePackageFiles enumerates the files belonging to package path, +// then loads, parses and returns them, plus a list of I/O or parse +// errors that were encountered. +// +// 'which' indicates which files to include: +// 'g': include non-test *.go source files (GoFiles + processed CgoFiles) +// 't': include in-package *_test.go source files (TestGoFiles) +// 'x': include external *_test.go source files. (XTestGoFiles) +// +func (conf *Config) parsePackageFiles(bp *build.Package, which rune) ([]*ast.File, []error) { + if bp.ImportPath == "unsafe" { + return nil, nil + } + var filenames []string + switch which { + case 'g': + filenames = bp.GoFiles + case 't': + filenames = bp.TestGoFiles + case 'x': + filenames = bp.XTestGoFiles + default: + panic(which) + } + + files, errs := parseFiles(conf.fset(), conf.build(), conf.DisplayPath, bp.Dir, filenames, conf.ParserMode) + + // Preprocess CgoFiles and parse the outputs (sequentially). + if which == 'g' && bp.CgoFiles != nil { + cgofiles, err := cgo.ProcessFiles(bp, conf.fset(), conf.DisplayPath, conf.ParserMode) + if err != nil { + errs = append(errs, err) + } else { + files = append(files, cgofiles...) + } + } + + return files, errs +} + +// doImport imports the package denoted by path. 
+//
+// It implements the types.Importer signature.
+//
+// It returns an error if a package could not be created
+// (e.g. go/build or parse error), but type errors are reported via
+// the types.Config.Error callback (the first of which is also saved
+// in the package's PackageInfo).
+//
+// Idempotent.
+//
+func (imp *importer) doImport(from *PackageInfo, to string) (*types.Package, error) {
+	if to == "C" {
+		// This should be unreachable, but ad hoc packages are
+		// not currently subject to cgo preprocessing.
+		// See https://golang.org/issue/11627.
+		return nil, fmt.Errorf(`the loader doesn't cgo-process ad hoc packages like %q; see Go issue 11627`,
+			from.Pkg.Path())
+	}
+
+	bp, err := imp.findPackage(to, from.dir, 0)
+	if err != nil {
+		return nil, err
+	}
+
+	// The standard unsafe package is handled specially,
+	// and has no PackageInfo.
+	if bp.ImportPath == "unsafe" {
+		return types.Unsafe, nil
+	}
+
+	// Look for the package in the cache using its canonical path.
+	path := bp.ImportPath
+	imp.importedMu.Lock()
+	ii := imp.imported[path]
+	imp.importedMu.Unlock()
+	if ii == nil {
+		panic("internal error: unexpected import: " + path)
+	}
+	if ii.info != nil {
+		return ii.info.Pkg, nil
+	}
+
+	// Import of incomplete package: this indicates a cycle.
+	fromPath := from.Pkg.Path()
+	if cycle := imp.findPath(path, fromPath); cycle != nil {
+		// Normalize cycle: start from alphabetically largest node.
+		pos, start := -1, ""
+		for i, s := range cycle {
+			if pos < 0 || s > start {
+				pos, start = i, s
+			}
+		}
+		cycle = append(cycle, cycle[:pos]...)[pos:] // rotate cycle to start from largest
+		cycle = append(cycle, cycle[0]) // add start node to end to show cycliness
+		return nil, fmt.Errorf("import cycle: %s", strings.Join(cycle, " -> "))
+	}
+
+	panic("internal error: import of incomplete (yet acyclic) package: " + fromPath)
+}
+
+// findPackage locates the package denoted by the importPath in the
+// specified directory.
+func (imp *importer) findPackage(importPath, fromDir string, mode build.ImportMode) (*build.Package, error) {
+	// We use a non-blocking duplicate-suppressing cache (gopl.io §9.7)
+	// to avoid holding the lock around FindPackage.
+	key := findpkgKey{importPath, fromDir, mode}
+	imp.findpkgMu.Lock()
+	v, ok := imp.findpkg[key]
+	if ok {
+		// cache hit
+		imp.findpkgMu.Unlock()
+
+		<-v.ready // wait for entry to become ready
+	} else {
+		// Cache miss: this goroutine becomes responsible for
+		// populating the map entry and broadcasting its readiness.
+		v = &findpkgValue{ready: make(chan struct{})}
+		imp.findpkg[key] = v
+		imp.findpkgMu.Unlock()
+
+		ioLimit <- true
+		v.bp, v.err = imp.conf.FindPackage(imp.conf.build(), importPath, fromDir, mode)
+		<-ioLimit
+
+		if _, ok := v.err.(*build.NoGoError); ok {
+			v.err = nil // empty directory is not an error
+		}
+
+		close(v.ready) // broadcast ready condition
+	}
+	return v.bp, v.err
+}
+
+// importAll loads, parses, and type-checks the specified packages in
+// parallel and returns their completed importInfos in unspecified order.
+//
+// fromPath is the package path of the importing package, if it is
+// importable, "" otherwise. It is used for cycle detection.
+//
+// fromDir is the directory containing the import declaration that
+// caused these imports.
+//
+func (imp *importer) importAll(fromPath, fromDir string, imports map[string]bool, mode build.ImportMode) (infos []*PackageInfo, errors []importError) {
+	if fromPath != "" {
+		// We're loading a set of imports.
+ // + // We must record graph edges from the importing package + // to its dependencies, and check for cycles. + imp.graphMu.Lock() + deps, ok := imp.graph[fromPath] + if !ok { + deps = make(map[string]bool) + imp.graph[fromPath] = deps + } + for importPath := range imports { + deps[importPath] = true + } + imp.graphMu.Unlock() + } + + var pending []*importInfo + for importPath := range imports { + if fromPath != "" { + if cycle := imp.findPath(importPath, fromPath); cycle != nil { + // Cycle-forming import: we must not check it + // since it would deadlock. + if trace { + fmt.Fprintf(os.Stderr, "import cycle: %q\n", cycle) + } + continue + } + } + bp, err := imp.findPackage(importPath, fromDir, mode) + if err != nil { + errors = append(errors, importError{ + path: importPath, + err: err, + }) + continue + } + pending = append(pending, imp.startLoad(bp)) + } + + for _, ii := range pending { + ii.awaitCompletion() + infos = append(infos, ii.info) + } + + return infos, errors +} + +// findPath returns an arbitrary path from 'from' to 'to' in the import +// graph, or nil if there was none. +func (imp *importer) findPath(from, to string) []string { + imp.graphMu.Lock() + defer imp.graphMu.Unlock() + + seen := make(map[string]bool) + var search func(stack []string, importPath string) []string + search = func(stack []string, importPath string) []string { + if !seen[importPath] { + seen[importPath] = true + stack = append(stack, importPath) + if importPath == to { + return stack + } + for x := range imp.graph[importPath] { + if p := search(stack, x); p != nil { + return p + } + } + } + return nil + } + return search(make([]string, 0, 20), from) +} + +// startLoad initiates the loading, parsing and type-checking of the +// specified package and its dependencies, if it has not already begun. +// +// It returns an importInfo, not necessarily in a completed state. The +// caller must call awaitCompletion() before accessing its info field. +// +// startLoad is concurrency-safe and idempotent. +// +func (imp *importer) startLoad(bp *build.Package) *importInfo { + path := bp.ImportPath + imp.importedMu.Lock() + ii, ok := imp.imported[path] + if !ok { + ii = &importInfo{path: path, complete: make(chan struct{})} + imp.imported[path] = ii + go func() { + info := imp.load(bp) + ii.Complete(info) + }() + } + imp.importedMu.Unlock() + + return ii +} + +// load implements package loading by parsing Go source files +// located by go/build. +func (imp *importer) load(bp *build.Package) *PackageInfo { + info := imp.newPackageInfo(bp.ImportPath, bp.Dir) + info.Importable = true + files, errs := imp.conf.parsePackageFiles(bp, 'g') + for _, err := range errs { + info.appendError(err) + } + + imp.addFiles(info, files, true) + + imp.progMu.Lock() + imp.prog.importMap[bp.ImportPath] = info.Pkg + imp.progMu.Unlock() + + return info +} + +// addFiles adds and type-checks the specified files to info, loading +// their dependencies if needed. The order of files determines the +// package initialization order. It may be called multiple times on the +// same package. Errors are appended to the info.Errors field. +// +// cycleCheck determines whether the imports within files create +// dependency edges that should be checked for potential cycles. +// +func (imp *importer) addFiles(info *PackageInfo, files []*ast.File, cycleCheck bool) { + // Ensure the dependencies are loaded, in parallel. + var fromPath string + if cycleCheck { + fromPath = info.Pkg.Path() + } + // TODO(adonovan): opt: make the caller do scanImports. 
+ // Callers with a build.Package can skip it. + imp.importAll(fromPath, info.dir, scanImports(files), 0) + + if trace { + fmt.Fprintf(os.Stderr, "%s: start %q (%d)\n", + time.Since(imp.start), info.Pkg.Path(), len(files)) + } + + // Don't call checker.Files on Unsafe, even with zero files, + // because it would mutate the package, which is a global. + if info.Pkg == types.Unsafe { + if len(files) > 0 { + panic(`"unsafe" package contains unexpected files`) + } + } else { + // Ignore the returned (first) error since we + // already collect them all in the PackageInfo. + info.checker.Files(files) + info.Files = append(info.Files, files...) + } + + if imp.conf.AfterTypeCheck != nil { + imp.conf.AfterTypeCheck(info, files) + } + + if trace { + fmt.Fprintf(os.Stderr, "%s: stop %q\n", + time.Since(imp.start), info.Pkg.Path()) + } +} + +func (imp *importer) newPackageInfo(path, dir string) *PackageInfo { + var pkg *types.Package + if path == "unsafe" { + pkg = types.Unsafe + } else { + pkg = types.NewPackage(path, "") + } + info := &PackageInfo{ + Pkg: pkg, + Info: types.Info{ + Types: make(map[ast.Expr]types.TypeAndValue), + Defs: make(map[*ast.Ident]types.Object), + Uses: make(map[*ast.Ident]types.Object), + Implicits: make(map[ast.Node]types.Object), + Scopes: make(map[ast.Node]*types.Scope), + Selections: make(map[*ast.SelectorExpr]*types.Selection), + }, + errorFunc: imp.conf.TypeChecker.Error, + dir: dir, + } + + // Copy the types.Config so we can vary it across PackageInfos. + tc := imp.conf.TypeChecker + tc.IgnoreFuncBodies = false + if f := imp.conf.TypeCheckFuncBodies; f != nil { + tc.IgnoreFuncBodies = !f(path) + } + tc.Importer = closure{imp, info} + tc.Error = info.appendError // appendError wraps the user's Error function + + info.checker = types.NewChecker(&tc, imp.conf.fset(), pkg, &info.Info) + imp.progMu.Lock() + imp.prog.AllPackages[pkg] = info + imp.progMu.Unlock() + return info +} + +type closure struct { + imp *importer + info *PackageInfo +} + +func (c closure) Import(to string) (*types.Package, error) { return c.imp.doImport(c.info, to) } diff --git a/vendor/golang.org/x/tools/go/loader/util.go b/vendor/golang.org/x/tools/go/loader/util.go new file mode 100644 index 000000000..7f38dd740 --- /dev/null +++ b/vendor/golang.org/x/tools/go/loader/util.go @@ -0,0 +1,124 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package loader + +import ( + "go/ast" + "go/build" + "go/parser" + "go/token" + "io" + "os" + "strconv" + "sync" + + "golang.org/x/tools/go/buildutil" +) + +// We use a counting semaphore to limit +// the number of parallel I/O calls per process. +var ioLimit = make(chan bool, 10) + +// parseFiles parses the Go source files within directory dir and +// returns the ASTs of the ones that could be at least partially parsed, +// along with a list of I/O and parse errors encountered. +// +// I/O is done via ctxt, which may specify a virtual file system. +// displayPath is used to transform the filenames attached to the ASTs. 
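+//
+// Parsing is done concurrently, one goroutine per file; the ioLimit
+// counting semaphore declared above bounds the number of files open at once.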
+//
+func parseFiles(fset *token.FileSet, ctxt *build.Context, displayPath func(string) string, dir string, files []string, mode parser.Mode) ([]*ast.File, []error) {
+	if displayPath == nil {
+		displayPath = func(path string) string { return path }
+	}
+	var wg sync.WaitGroup
+	n := len(files)
+	parsed := make([]*ast.File, n)
+	errors := make([]error, n)
+	for i, file := range files {
+		if !buildutil.IsAbsPath(ctxt, file) {
+			file = buildutil.JoinPath(ctxt, dir, file)
+		}
+		wg.Add(1)
+		go func(i int, file string) {
+			ioLimit <- true // wait
+			defer func() {
+				wg.Done()
+				<-ioLimit // signal
+			}()
+			var rd io.ReadCloser
+			var err error
+			if ctxt.OpenFile != nil {
+				rd, err = ctxt.OpenFile(file)
+			} else {
+				rd, err = os.Open(file)
+			}
+			if err != nil {
+				errors[i] = err // open failed
+				return
+			}
+
+			// ParseFile may return both an AST and an error.
+			parsed[i], errors[i] = parser.ParseFile(fset, displayPath(file), rd, mode)
+			rd.Close()
+		}(i, file)
+	}
+	wg.Wait()
+
+	// Eliminate nils, preserving order.
+	var o int
+	for _, f := range parsed {
+		if f != nil {
+			parsed[o] = f
+			o++
+		}
+	}
+	parsed = parsed[:o]
+
+	o = 0
+	for _, err := range errors {
+		if err != nil {
+			errors[o] = err
+			o++
+		}
+	}
+	errors = errors[:o]
+
+	return parsed, errors
+}
+
+// scanImports returns the set of all import paths from all
+// import specs in the specified files.
+func scanImports(files []*ast.File) map[string]bool {
+	imports := make(map[string]bool)
+	for _, f := range files {
+		for _, decl := range f.Decls {
+			if decl, ok := decl.(*ast.GenDecl); ok && decl.Tok == token.IMPORT {
+				for _, spec := range decl.Specs {
+					spec := spec.(*ast.ImportSpec)
+
+					// NB: do not assume the program is well-formed!
+					path, err := strconv.Unquote(spec.Path.Value)
+					if err != nil {
+						continue // quietly ignore the error
+					}
+					if path == "C" {
+						continue // skip pseudopackage
+					}
+					imports[path] = true
+				}
+			}
+		}
+	}
+	return imports
+}
+
+// ---------- Internal helpers ----------
+
+// TODO(adonovan): make this a method: func (*token.File) Contains(token.Pos)
+func tokenFileContainsPos(f *token.File, pos token.Pos) bool {
+	p := int(pos)
+	base := f.Base()
+	return base <= p && p < base+f.Size()
+}
diff --git a/vendor/golang.org/x/tools/internal/typeparams/doc.go b/vendor/golang.org/x/tools/internal/typeparams/doc.go
new file mode 100644
index 000000000..5583947e2
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/doc.go
@@ -0,0 +1,11 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package typeparams provides functions to work indirectly with type parameter
+// data stored in go/ast and go/types objects, while these APIs are guarded by a
+// build constraint.
+//
+// This package exists to make it easier for tools to work with generic code,
+// while also compiling against older Go versions.
+package typeparams
diff --git a/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go b/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
new file mode 100644
index 000000000..3a0abc7c1
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/notypeparams.go
@@ -0,0 +1,90 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build !typeparams || !go1.17
+// +build !typeparams !go1.17
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// NOTE: doc comments must be kept in sync with typeparams.go.
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = false
+
+// UnpackIndex extracts all index expressions from e. For non-generic code this
+// is always one expression: e.Index, but may be more than one expression for
+// generic type instantiation.
+func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
+	return []ast.Expr{e.Index}
+}
+
+// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
+// introduced to hold type arguments for generic type instantiation.
+func IsListExpr(n ast.Node) bool {
+	return false
+}
+
+// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
+func ForTypeDecl(*ast.TypeSpec) *ast.FieldList {
+	return nil
+}
+
+// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
+func ForFuncDecl(*ast.FuncDecl) *ast.FieldList {
+	return nil
+}
+
+// ForSignature extracts the (possibly empty) type parameter object list from
+// sig.
+func ForSignature(*types.Signature) []*types.TypeName {
+	return nil
+}
+
+// HasTypeSet reports if iface has a type set.
+func HasTypeSet(*types.Interface) bool {
+	return false
+}
+
+// IsComparable reports if iface is the comparable interface.
+func IsComparable(*types.Interface) bool {
+	return false
+}
+
+// IsConstraint reports whether iface may only be used as a type parameter
+// constraint (i.e. has a type set or is the comparable interface).
+func IsConstraint(*types.Interface) bool {
+	return false
+}
+
+// ForNamed extracts the (possibly empty) type parameter object list from
+// named.
+func ForNamed(*types.Named) []*types.TypeName {
+	return nil
+}
+
+// NamedTArgs extracts the (possibly empty) type argument list from named.
+func NamedTArgs(*types.Named) []types.Type {
+	return nil
+}
+
+// InitInferred initializes info to record inferred type information.
+func InitInferred(*types.Info) {
+}
+
+// GetInferred extracts inferred type information from info for e.
+//
+// The expression e may have an inferred type if it is an *ast.IndexExpr
+// representing partial instantiation of a generic function type for which type
+// arguments have been inferred using constraint type inference, or if it is an
+// *ast.CallExpr for which type arguments have been inferred using both
+// constraint type inference and function argument inference.
+func GetInferred(*types.Info, ast.Expr) ([]types.Type, *types.Signature) {
+	return nil, nil
+}
diff --git a/vendor/golang.org/x/tools/internal/typeparams/typeparams.go b/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
new file mode 100644
index 000000000..6b7958af0
--- /dev/null
+++ b/vendor/golang.org/x/tools/internal/typeparams/typeparams.go
@@ -0,0 +1,105 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+//go:build typeparams && go1.17
+// +build typeparams,go1.17
+
+package typeparams
+
+import (
+	"go/ast"
+	"go/types"
+)
+
+// NOTE: doc comments must be kept in sync with notypeparams.go.
+
+// Enabled reports whether type parameters are enabled in the current build
+// environment.
+const Enabled = true
+
+// UnpackIndex extracts all index expressions from e. For non-generic code this
+// is always one expression: e.Index, but may be more than one expression for
+// generic type instantiation.
+func UnpackIndex(e *ast.IndexExpr) []ast.Expr {
+	if x, _ := e.Index.(*ast.ListExpr); x != nil {
+		return x.ElemList
+	}
+	if e.Index != nil {
+		return []ast.Expr{e.Index}
+	}
+	return nil
+}
+
+// IsListExpr reports whether n is an *ast.ListExpr, which is a new node type
+// introduced to hold type arguments for generic type instantiation.
+func IsListExpr(n ast.Node) bool {
+	_, ok := n.(*ast.ListExpr)
+	return ok
+}
+
+// ForTypeDecl extracts the (possibly nil) type parameter node list from n.
+func ForTypeDecl(n *ast.TypeSpec) *ast.FieldList {
+	return n.TParams
+}
+
+// ForFuncDecl extracts the (possibly nil) type parameter node list from n.
+func ForFuncDecl(n *ast.FuncDecl) *ast.FieldList {
+	if n.Type != nil {
+		return n.Type.TParams
+	}
+	return nil
+}
+
+// ForSignature extracts the (possibly empty) type parameter object list from
+// sig.
+func ForSignature(sig *types.Signature) []*types.TypeName {
+	return sig.TParams()
+}
+
+// HasTypeSet reports if iface has a type set.
+func HasTypeSet(iface *types.Interface) bool {
+	return iface.HasTypeList()
+}
+
+// IsComparable reports if iface is the comparable interface.
+func IsComparable(iface *types.Interface) bool {
+	return iface.IsComparable()
+}
+
+// IsConstraint reports whether iface may only be used as a type parameter
+// constraint (i.e. has a type set or is the comparable interface).
+func IsConstraint(iface *types.Interface) bool {
+	return iface.IsConstraint()
+}
+
+// ForNamed extracts the (possibly empty) type parameter object list from
+// named.
+func ForNamed(named *types.Named) []*types.TypeName {
+	return named.TParams()
+}
+
+// NamedTArgs extracts the (possibly empty) type argument list from named.
+func NamedTArgs(named *types.Named) []types.Type {
+	return named.TArgs()
+}
+
+// InitInferred initializes info to record inferred type information.
+func InitInferred(info *types.Info) {
+	info.Inferred = make(map[ast.Expr]types.Inferred)
+}
+
+// GetInferred extracts inferred type information from info for e.
+//
+// The expression e may have an inferred type if it is an *ast.IndexExpr
+// representing partial instantiation of a generic function type for which type
+// arguments have been inferred using constraint type inference, or if it is an
+// *ast.CallExpr for which type arguments have been inferred using both
+// constraint type inference and function argument inference.
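+//
+// Callers must call InitInferred on info before type-checking; otherwise
+// GetInferred always returns nil results. A minimal usage sketch (identifiers
+// are illustrative only):
+//
+//	var info types.Info
+//	typeparams.InitInferred(&info)
+//	// ... type-check files into &info ...
+//	targs, sig := typeparams.GetInferred(&info, expr)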
+func GetInferred(info *types.Info, e ast.Expr) ([]types.Type, *types.Signature) { + if info.Inferred == nil { + return nil, nil + } + inf := info.Inferred[e] + return inf.TArgs, inf.Sig +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 6065e949e..eeeb213b1 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -90,9 +90,6 @@ github.com/franela/goblin github.com/ghodss/yaml # github.com/gin-contrib/sse v0.1.0 github.com/gin-contrib/sse -# github.com/gin-gonic/contrib v0.0.0-20191209060500-d6e26eeaa607 -## explicit -github.com/gin-gonic/contrib/ginrus # github.com/gin-gonic/gin v1.7.4 ## explicit github.com/gin-gonic/gin @@ -220,6 +217,7 @@ github.com/rogpeppe/go-internal/fmtsort # github.com/rs/zerolog v1.25.0 ## explicit github.com/rs/zerolog +github.com/rs/zerolog/cmd/lint github.com/rs/zerolog/internal/cbor github.com/rs/zerolog/internal/json github.com/rs/zerolog/log @@ -231,7 +229,6 @@ github.com/russross/meddler # github.com/shurcooL/sanitized_anchor_name v1.0.0 github.com/shurcooL/sanitized_anchor_name # github.com/sirupsen/logrus v1.8.1 -## explicit github.com/sirupsen/logrus # github.com/stretchr/objx v0.3.0 ## explicit @@ -304,6 +301,7 @@ golang.org/x/sync/errgroup # golang.org/x/sys v0.0.0-20210923061019-b8560ed6a9b7 ## explicit golang.org/x/sys/cpu +golang.org/x/sys/execabs golang.org/x/sys/internal/unsafeheader golang.org/x/sys/unix golang.org/x/sys/windows @@ -319,6 +317,12 @@ golang.org/x/text/unicode/bidi golang.org/x/text/unicode/norm # golang.org/x/time v0.0.0-20200630173020-3af7569d3a1e golang.org/x/time/rate +# golang.org/x/tools v0.1.5 +golang.org/x/tools/go/ast/astutil +golang.org/x/tools/go/buildutil +golang.org/x/tools/go/internal/cgo +golang.org/x/tools/go/loader +golang.org/x/tools/internal/typeparams # google.golang.org/appengine v1.6.7 google.golang.org/appengine/internal google.golang.org/appengine/internal/base