handle compiler or lint error

This commit is contained in:
Brad Rydzewski 2017-03-10 02:58:25 -08:00
parent bb7453262a
commit b4c4e92b5b
12 changed files with 463 additions and 553 deletions

View file

@ -8,6 +8,7 @@ type Build struct {
Parent int `json:"parent" meddler:"build_parent"`
Event string `json:"event" meddler:"build_event"`
Status string `json:"status" meddler:"build_status"`
Error string `json:"error" meddler:"build_error"`
Enqueued int64 `json:"enqueued_at" meddler:"build_enqueued"`
Created int64 `json:"created_at" meddler:"build_created"`
Started int64 `json:"started_at" meddler:"build_started"`

View file

@ -10,6 +10,7 @@ import (
"github.com/drone/drone/router/middleware/session"
"github.com/drone/drone/router/middleware/token"
"github.com/drone/drone/server"
"github.com/drone/drone/server/debug"
"github.com/drone/drone/server/template"
"github.com/drone/drone-ui/dist"
@ -175,41 +176,21 @@ func Load(middleware ...gin.HandlerFunc) http.Handler {
builds.GET("", server.GetBuildQueue)
}
agents := e.Group("/api/agents")
debugger := e.Group("/api/debug")
{
agents.Use(session.MustAdmin())
agents.GET("", server.GetAgents)
debugger.Use(session.MustAdmin())
debugger.GET("/pprof/", debug.IndexHandler())
debugger.GET("/pprof/heap", debug.HeapHandler())
debugger.GET("/pprof/goroutine", debug.GoroutineHandler())
debugger.GET("/pprof/block", debug.BlockHandler())
debugger.GET("/pprof/threadcreate", debug.ThreadCreateHandler())
debugger.GET("/pprof/cmdline", debug.CmdlineHandler())
debugger.GET("/pprof/profile", debug.ProfileHandler())
debugger.GET("/pprof/symbol", debug.SymbolHandler())
debugger.POST("/pprof/symbol", debug.SymbolHandler())
debugger.GET("/pprof/trace", debug.TraceHandler())
}
debug := e.Group("/api/debug")
{
debug.Use(session.MustAdmin())
debug.GET("/pprof/", server.IndexHandler())
debug.GET("/pprof/heap", server.HeapHandler())
debug.GET("/pprof/goroutine", server.GoroutineHandler())
debug.GET("/pprof/block", server.BlockHandler())
debug.GET("/pprof/threadcreate", server.ThreadCreateHandler())
debug.GET("/pprof/cmdline", server.CmdlineHandler())
debug.GET("/pprof/profile", server.ProfileHandler())
debug.GET("/pprof/symbol", server.SymbolHandler())
debug.POST("/pprof/symbol", server.SymbolHandler())
debug.GET("/pprof/trace", server.TraceHandler())
}
// DELETE THESE
// gitlab := e.Group("/gitlab/:owner/:name")
// {
// gitlab.Use(session.SetRepo())
// gitlab.GET("/commits/:sha", GetCommit)
// gitlab.GET("/pulls/:number", GetPullRequest)
//
// redirects := gitlab.Group("/redirect")
// {
// redirects.GET("/commits/:sha", RedirectSha)
// redirects.GET("/pulls/:number", RedirectPullRequest)
// }
// }
// bots := e.Group("/bots")
// {
// bots.Use(session.MustUser())

View file

@ -1,15 +0,0 @@
package server
import (
"github.com/drone/drone/store"
"github.com/gin-gonic/gin"
)
// GetAgents responds with the list of registered build agents encoded
// as JSON, or a 500 status when the agent list cannot be loaded.
func GetAgents(c *gin.Context) {
	list, err := store.GetAgentList(c)
	if err != nil {
		c.String(500, "Error getting agent list. %s", err)
		return
	}
	c.JSON(200, list)
}

View file

@ -1,4 +1,4 @@
package server
package debug
import (
"net/http/pprof"

View file

@ -1,100 +0,0 @@
package server
import (
"fmt"
"net/http"
"github.com/gin-gonic/gin"
"github.com/drone/drone/router/middleware/session"
"github.com/drone/drone/shared/token"
"github.com/drone/drone/store"
)
// GetCommit responds with the build matching the commit sha in the
// request path, scoped to the branch query parameter (defaulting to the
// repository default branch). The request must carry a token signed
// with the repository hash.
func GetCommit(c *gin.Context) {
	repo := session.Repo(c)

	parsed, err := token.ParseRequest(c.Request, func(_ *token.Token) (string, error) {
		return repo.Hash, nil
	})
	if err != nil {
		c.AbortWithError(http.StatusBadRequest, err)
		return
	}
	if parsed.Text != repo.FullName {
		c.AbortWithStatus(http.StatusUnauthorized)
		return
	}

	branch := c.Query("branch")
	if branch == "" {
		branch = repo.Branch
	}

	build, err := store.GetBuildCommit(c, repo, c.Param("sha"), branch)
	if err != nil {
		c.AbortWithError(http.StatusNotFound, err)
		return
	}
	c.JSON(http.StatusOK, build)
}
// GetPullRequest responds with the build for the pull request number in
// the request path. The request must carry a token signed with the
// repository hash.
func GetPullRequest(c *gin.Context) {
	repo := session.Repo(c)

	parsed, err := token.ParseRequest(c.Request, func(_ *token.Token) (string, error) {
		return repo.Hash, nil
	})
	if err != nil {
		c.AbortWithError(http.StatusBadRequest, err)
		return
	}
	if parsed.Text != repo.FullName {
		c.AbortWithStatus(http.StatusUnauthorized)
		return
	}

	ref := fmt.Sprintf("refs/pull/%s/head", c.Param("number"))
	build, err := store.GetBuildRef(c, repo, ref)
	if err != nil {
		c.AbortWithError(http.StatusNotFound, err)
		return
	}
	c.JSON(http.StatusOK, build)
}
// RedirectSha issues a 303 redirect to the canonical build page for the
// commit sha in the request path, scoped to the branch query parameter
// (defaulting to the repository default branch).
func RedirectSha(c *gin.Context) {
	repo := session.Repo(c)

	branch := c.Query("branch")
	if branch == "" {
		branch = repo.Branch
	}

	build, err := store.GetBuildCommit(c, repo, c.Param("sha"), branch)
	if err != nil {
		c.AbortWithError(http.StatusNotFound, err)
		return
	}
	location := fmt.Sprintf("/%s/%s/%d", repo.Owner, repo.Name, build.Number)
	c.Redirect(http.StatusSeeOther, location)
}
// RedirectPullRequest issues a 303 redirect to the canonical build page
// for the pull request number in the request path.
func RedirectPullRequest(c *gin.Context) {
	repo := session.Repo(c)

	ref := fmt.Sprintf("refs/pull/%s/head", c.Param("number"))
	build, err := store.GetBuildRef(c, repo, ref)
	if err != nil {
		c.AbortWithError(http.StatusNotFound, err)
		return
	}
	location := fmt.Sprintf("/%s/%s/%d", repo.Owner, repo.Name, build.Number)
	c.Redirect(http.StatusSeeOther, location)
}

View file

@ -1,13 +1,8 @@
package server
import (
"context"
"encoding/json"
"fmt"
"net/url"
"regexp"
"strconv"
"time"
"github.com/gin-gonic/gin"
"github.com/square/go-jose"
@ -19,22 +14,9 @@ import (
"github.com/drone/drone/shared/token"
"github.com/drone/drone/store"
"github.com/drone/drone/yaml"
"github.com/drone/envsubst"
"github.com/drone/mq/stomp"
"github.com/cncd/pipeline/pipeline/backend"
"github.com/cncd/pipeline/pipeline/frontend"
yaml2 "github.com/cncd/pipeline/pipeline/frontend/yaml"
"github.com/cncd/pipeline/pipeline/frontend/yaml/compiler"
"github.com/cncd/pipeline/pipeline/frontend/yaml/linter"
"github.com/cncd/pipeline/pipeline/frontend/yaml/matrix"
"github.com/cncd/pipeline/pipeline/rpc"
"github.com/cncd/pubsub"
"github.com/cncd/queue"
)
var skipRe = regexp.MustCompile(`\[(?i:ci *skip|skip *ci)\]`)
func PostHook(c *gin.Context) {
remote_ := remote.FromContext(c)
@ -260,390 +242,4 @@ func PostHook(c *gin.Context) {
),
)
}
}
//
// CANARY IMPLEMENTATION
//
// This file is a complete disaster because I'm trying to wedge in some
// experimental code. Please pardon our appearance during renovations.
//
func GetQueueInfo(c *gin.Context) {
c.IndentedJSON(200,
config.queue.Info(c),
)
}
func PostHook2(c *gin.Context) {
remote_ := remote.FromContext(c)
tmprepo, build, err := remote_.Hook(c.Request)
if err != nil {
log.Errorf("failure to parse hook. %s", err)
c.AbortWithError(400, err)
return
}
if build == nil {
c.Writer.WriteHeader(200)
return
}
if tmprepo == nil {
log.Errorf("failure to ascertain repo from hook.")
c.Writer.WriteHeader(400)
return
}
// skip the build if any case-insensitive combination of the words "skip" and "ci"
// wrapped in square brackets appear in the commit message
skipMatch := skipRe.FindString(build.Message)
if len(skipMatch) > 0 {
log.Infof("ignoring hook. %s found in %s", skipMatch, build.Commit)
c.Writer.WriteHeader(204)
return
}
repo, err := store.GetRepoOwnerName(c, tmprepo.Owner, tmprepo.Name)
if err != nil {
log.Errorf("failure to find repo %s/%s from hook. %s", tmprepo.Owner, tmprepo.Name, err)
c.AbortWithError(404, err)
return
}
// get the token and verify the hook is authorized
parsed, err := token.ParseRequest(c.Request, func(t *token.Token) (string, error) {
return repo.Hash, nil
})
if err != nil {
log.Errorf("failure to parse token from hook for %s. %s", repo.FullName, err)
c.AbortWithError(400, err)
return
}
if parsed.Text != repo.FullName {
log.Errorf("failure to verify token from hook. Expected %s, got %s", repo.FullName, parsed.Text)
c.AbortWithStatus(403)
return
}
if repo.UserID == 0 {
log.Warnf("ignoring hook. repo %s has no owner.", repo.FullName)
c.Writer.WriteHeader(204)
return
}
var skipped = true
if (build.Event == model.EventPush && repo.AllowPush) ||
(build.Event == model.EventPull && repo.AllowPull) ||
(build.Event == model.EventDeploy && repo.AllowDeploy) ||
(build.Event == model.EventTag && repo.AllowTag) {
skipped = false
}
if skipped {
log.Infof("ignoring hook. repo %s is disabled for %s events.", repo.FullName, build.Event)
c.Writer.WriteHeader(204)
return
}
user, err := store.GetUser(c, repo.UserID)
if err != nil {
log.Errorf("failure to find repo owner %s. %s", repo.FullName, err)
c.AbortWithError(500, err)
return
}
// if there is no email address associated with the pull request,
// we lookup the email address based on the authors github login.
//
// my initial hesitation with this code is that it has the ability
// to expose your email address. At the same time, your email address
// is already exposed in the public .git log. So while some people will
// a small number of people will probably be upset by this, I'm not sure
// it is actually that big of a deal.
if len(build.Email) == 0 {
author, uerr := store.GetUserLogin(c, build.Author)
if uerr == nil {
build.Email = author.Email
}
}
// if the remote has a refresh token, the current access token
// may be stale. Therefore, we should refresh prior to dispatching
// the job.
if refresher, ok := remote_.(remote.Refresher); ok {
ok, _ := refresher.Refresh(user)
if ok {
store.UpdateUser(c, user)
}
}
// fetch the build file from the database
cfg := ToConfig(c)
raw, err := remote_.File(user, repo, build, cfg.Yaml)
if err != nil {
log.Errorf("failure to get build config for %s. %s", repo.FullName, err)
c.AbortWithError(404, err)
return
}
sec, err := remote_.File(user, repo, build, cfg.Shasum)
if err != nil {
log.Debugf("cannot find build secrets for %s. %s", repo.FullName, err)
// NOTE we don't exit on failure. The sec file is optional
}
axes, err := matrix.Parse(raw)
if err != nil {
c.String(500, "Failed to parse yaml file or calculate matrix. %s", err)
return
}
if len(axes) == 0 {
axes = append(axes, matrix.Axis{})
}
netrc, err := remote_.Netrc(user, repo)
if err != nil {
c.String(500, "Failed to generate netrc file. %s", err)
return
}
// verify the branches can be built vs skipped
branches, err := yaml2.ParseBytes(raw)
if err != nil {
c.String(500, "Failed to parse yaml file. %s", err)
return
}
if !branches.Branches.Match(build.Branch) && build.Event != model.EventTag && build.Event != model.EventDeploy {
c.String(200, "Branch does not match restrictions defined in yaml")
return
}
signature, err := jose.ParseSigned(string(sec))
if err != nil {
log.Debugf("cannot parse .drone.yml.sig file. %s", err)
} else if len(sec) == 0 {
log.Debugf("cannot parse .drone.yml.sig file. empty file")
} else {
build.Signed = true
output, verr := signature.Verify([]byte(repo.Hash))
if verr != nil {
log.Debugf("cannot verify .drone.yml.sig file. %s", verr)
} else if string(output) != string(raw) {
log.Debugf("cannot verify .drone.yml.sig file. no match")
} else {
build.Verified = true
}
}
// update some build fields
build.Status = model.StatusPending
build.RepoID = repo.ID
// and use a transaction
var jobs []*model.Job
for num, axis := range axes {
jobs = append(jobs, &model.Job{
BuildID: build.ID,
Number: num + 1,
Status: model.StatusPending,
Environment: axis,
})
}
err = store.CreateBuild(c, build, jobs...)
if err != nil {
log.Errorf("failure to save commit for %s. %s", repo.FullName, err)
c.AbortWithError(500, err)
return
}
c.JSON(200, build)
uri := fmt.Sprintf("%s/%s/%d", httputil.GetURL(c.Request), repo.FullName, build.Number)
err = remote_.Status(user, repo, build, uri)
if err != nil {
log.Errorf("error setting commit status for %s/%d", repo.FullName, build.Number)
}
// get the previous build so that we can send
// on status change notifications
last, _ := store.GetBuildLastBefore(c, repo, build.Branch, build.ID)
secs, err := store.GetMergedSecretList(c, repo)
if err != nil {
log.Debugf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err)
}
//
// new code here
//
message := pubsub.Message{}
message.Data, _ = json.Marshal(model.Event{
Type: model.Enqueued,
Repo: *repo,
Build: *build,
})
message.Labels = map[string]string{
"repo": repo.FullName,
"private": strconv.FormatBool(repo.IsPrivate),
}
// TODO remove global reference
config.pubsub.Publish(c, "topic/events", message)
//
// workspace
//
var (
link, _ = url.Parse(repo.Link)
base = "/drone"
path = "src/" + link.Host + "/" + repo.FullName
)
for _, job := range jobs {
metadata := metadataFromStruct(repo, build, last, job, "linux/amd64")
environ := metadata.Environ()
secrets := map[string]string{}
for _, sec := range secs {
if !sec.MatchEvent(build.Event) {
continue
}
if build.Verified || sec.SkipVerify {
secrets[sec.Name] = sec.Value
}
}
sub := func(name string) string {
if v, ok := environ[name]; ok {
return v
}
return secrets[name]
}
if s, err := envsubst.Eval(string(raw), sub); err != nil {
raw = []byte(s)
}
parsed, err := yaml2.ParseBytes(raw)
if err != nil {
// TODO
}
lerr := linter.New(
linter.WithTrusted(repo.IsTrusted),
).Lint(parsed)
if lerr != nil {
// TODO
}
ir := compiler.New(
compiler.WithEnviron(environ),
compiler.WithEscalated("plugins/docker", "plugins/gcr", "plugins/ecr"),
compiler.WithLocal(false),
compiler.WithNetrc(netrc.Login, netrc.Password, netrc.Machine),
compiler.WithPrefix(
fmt.Sprintf(
"%d_%d",
job.ID,
time.Now().Unix(),
),
),
compiler.WithProxy(),
compiler.WithVolumes(), // todo set global volumes
compiler.WithWorkspace(base, path),
).Compile(parsed)
task := new(queue.Task)
task.ID = fmt.Sprint(job.ID)
task.Labels = map[string]string{}
task.Labels["platform"] = "linux/amd64"
// TODO set proper platform
// TODO set proper labels
task.Data, _ = json.Marshal(rpc.Pipeline{
ID: fmt.Sprint(job.ID),
Config: ir,
Timeout: repo.Timeout,
})
config.logger.Open(context.Background(), task.ID)
config.queue.Push(context.Background(), task)
}
}
// use helper function to return ([]backend.Config, error)
type builder struct {
secs []*model.Secret
repo *model.Repo
build *model.Build
last *model.Build
jobs []*model.Job
link string
}
func (b *builder) Build() ([]*backend.Config, error) {
return nil, nil
}
// return the metadata from the cli context.
func metadataFromStruct(repo *model.Repo, build, last *model.Build, job *model.Job, link string) frontend.Metadata {
return frontend.Metadata{
Repo: frontend.Repo{
Name: repo.Name,
Link: repo.Link,
Remote: repo.Clone,
Private: repo.IsPrivate,
},
Curr: frontend.Build{
Number: build.Number,
Created: build.Created,
Started: build.Started,
Finished: build.Finished,
Status: build.Status,
Event: build.Event,
Link: build.Link,
Target: build.Deploy,
Commit: frontend.Commit{
Sha: build.Commit,
Ref: build.Ref,
Refspec: build.Refspec,
Branch: build.Branch,
Message: build.Message,
Author: frontend.Author{
Name: build.Author,
Email: build.Email,
Avatar: build.Avatar,
},
},
},
Prev: frontend.Build{
Number: last.Number,
Created: last.Created,
Started: last.Started,
Finished: last.Finished,
Status: last.Status,
Event: last.Event,
Link: last.Link,
Target: last.Deploy,
Commit: frontend.Commit{
Sha: last.Commit,
Ref: last.Ref,
Refspec: last.Refspec,
Branch: last.Branch,
Message: last.Message,
Author: frontend.Author{
Name: last.Author,
Email: last.Email,
Avatar: last.Avatar,
},
},
},
Job: frontend.Job{
Number: job.Number,
Matrix: job.Environment,
},
Sys: frontend.System{
Name: "drone",
Link: link,
Arch: "linux/amd64",
},
}
}

410
server/hook2.go Normal file
View file

@ -0,0 +1,410 @@
package server
import (
"context"
"encoding/json"
"fmt"
"regexp"
"strconv"
"time"
"github.com/gin-gonic/gin"
"github.com/square/go-jose"
"github.com/Sirupsen/logrus"
"github.com/drone/drone/model"
"github.com/drone/drone/remote"
"github.com/drone/drone/shared/httputil"
"github.com/drone/drone/shared/token"
"github.com/drone/drone/store"
"github.com/drone/envsubst"
"github.com/cncd/pipeline/pipeline/backend"
"github.com/cncd/pipeline/pipeline/frontend"
"github.com/cncd/pipeline/pipeline/frontend/yaml"
"github.com/cncd/pipeline/pipeline/frontend/yaml/compiler"
"github.com/cncd/pipeline/pipeline/frontend/yaml/linter"
"github.com/cncd/pipeline/pipeline/frontend/yaml/matrix"
"github.com/cncd/pipeline/pipeline/rpc"
"github.com/cncd/pubsub"
"github.com/cncd/queue"
)
//
// CANARY IMPLEMENTATION
//
// This file is a complete disaster because I'm trying to wedge in some
// experimental code. Please pardon our appearance during renovations.
//

// skipRe matches any case-insensitive combination of the words "skip"
// and "ci" wrapped in square brackets (e.g. "[ci skip]", "[SKIP CI]");
// a match in the commit message suppresses the build.
var skipRe = regexp.MustCompile(`\[(?i:ci *skip|skip *ci)\]`)
// GetQueueInfo responds with the current build queue statistics,
// encoded as indented JSON.
func GetQueueInfo(c *gin.Context) {
	info := config.queue.Info(c)
	c.IndentedJSON(200, info)
}
// PostHook2 is the canary webhook endpoint. It parses the incoming
// hook, authorizes it against the repository token, records the build
// and its jobs, publishes an enqueued event, and pushes one compiled
// pipeline task per job onto the build queue. Jobs whose configuration
// fails to parse or lint are marked failed (exit code 255) instead of
// being queued.
func PostHook2(c *gin.Context) {
	remote_ := remote.FromContext(c)

	tmprepo, build, err := remote_.Hook(c.Request)
	if err != nil {
		logrus.Errorf("failure to parse hook. %s", err)
		c.AbortWithError(400, err)
		return
	}
	// a nil build means the hook was valid but carries nothing to run.
	if build == nil {
		c.Writer.WriteHeader(200)
		return
	}
	if tmprepo == nil {
		logrus.Errorf("failure to ascertain repo from hook.")
		c.Writer.WriteHeader(400)
		return
	}

	// skip the build if any case-insensitive combination of the words "skip" and "ci"
	// wrapped in square brackets appear in the commit message
	skipMatch := skipRe.FindString(build.Message)
	if len(skipMatch) > 0 {
		logrus.Infof("ignoring hook. %s found in %s", skipMatch, build.Commit)
		c.Writer.WriteHeader(204)
		return
	}

	repo, err := store.GetRepoOwnerName(c, tmprepo.Owner, tmprepo.Name)
	if err != nil {
		logrus.Errorf("failure to find repo %s/%s from hook. %s", tmprepo.Owner, tmprepo.Name, err)
		c.AbortWithError(404, err)
		return
	}

	// get the token and verify the hook is authorized
	parsed, err := token.ParseRequest(c.Request, func(t *token.Token) (string, error) {
		return repo.Hash, nil
	})
	if err != nil {
		logrus.Errorf("failure to parse token from hook for %s. %s", repo.FullName, err)
		c.AbortWithError(400, err)
		return
	}
	if parsed.Text != repo.FullName {
		logrus.Errorf("failure to verify token from hook. Expected %s, got %s", repo.FullName, parsed.Text)
		c.AbortWithStatus(403)
		return
	}

	if repo.UserID == 0 {
		logrus.Warnf("ignoring hook. repo %s has no owner.", repo.FullName)
		c.Writer.WriteHeader(204)
		return
	}

	// only dispatch events the repository has opted into.
	var skipped = true
	if (build.Event == model.EventPush && repo.AllowPush) ||
		(build.Event == model.EventPull && repo.AllowPull) ||
		(build.Event == model.EventDeploy && repo.AllowDeploy) ||
		(build.Event == model.EventTag && repo.AllowTag) {
		skipped = false
	}
	if skipped {
		logrus.Infof("ignoring hook. repo %s is disabled for %s events.", repo.FullName, build.Event)
		c.Writer.WriteHeader(204)
		return
	}

	user, err := store.GetUser(c, repo.UserID)
	if err != nil {
		logrus.Errorf("failure to find repo owner %s. %s", repo.FullName, err)
		c.AbortWithError(500, err)
		return
	}

	// if the remote has a refresh token, the current access token
	// may be stale. Therefore, we should refresh prior to dispatching
	// the job.
	if refresher, ok := remote_.(remote.Refresher); ok {
		ok, _ := refresher.Refresh(user)
		if ok {
			store.UpdateUser(c, user)
		}
	}

	// fetch the build file from the database
	cfg := ToConfig(c)
	raw, err := remote_.File(user, repo, build, cfg.Yaml)
	if err != nil {
		logrus.Errorf("failure to get build config for %s. %s", repo.FullName, err)
		c.AbortWithError(404, err)
		return
	}
	sec, err := remote_.File(user, repo, build, cfg.Shasum)
	if err != nil {
		logrus.Debugf("cannot find yaml signature for %s. %s", repo.FullName, err)
		// NOTE we don't exit on failure. The sec file is optional
	}

	axes, err := matrix.Parse(raw)
	if err != nil {
		c.String(500, "Failed to parse yaml file or calculate matrix. %s", err)
		return
	}
	// a build always has at least one (possibly empty) matrix axis.
	if len(axes) == 0 {
		axes = append(axes, matrix.Axis{})
	}

	netrc, err := remote_.Netrc(user, repo)
	if err != nil {
		c.String(500, "Failed to generate netrc file. %s", err)
		return
	}

	// verify the branches can be built vs skipped
	branches, err := yaml.ParseBytes(raw)
	if err != nil {
		c.String(500, "Failed to parse yaml file. %s", err)
		return
	}
	if !branches.Branches.Match(build.Branch) && build.Event != model.EventTag && build.Event != model.EventDeploy {
		c.String(200, "Branch does not match restrictions defined in yaml")
		return
	}

	// verify the signature file, when present, against the raw yaml.
	signature, err := jose.ParseSigned(string(sec))
	if err != nil {
		logrus.Debugf("cannot parse .drone.yml.sig file. %s", err)
	} else if len(sec) == 0 {
		logrus.Debugf("cannot parse .drone.yml.sig file. empty file")
	} else {
		build.Signed = true
		output, verr := signature.Verify([]byte(repo.Hash))
		if verr != nil {
			logrus.Debugf("cannot verify .drone.yml.sig file. %s", verr)
		} else if string(output) != string(raw) {
			logrus.Debugf("cannot verify .drone.yml.sig file. no match")
		} else {
			build.Verified = true
		}
	}

	// update some build fields
	build.Status = model.StatusPending
	build.RepoID = repo.ID

	// and use a transaction
	var jobs []*model.Job
	for num, axis := range axes {
		jobs = append(jobs, &model.Job{
			BuildID:     build.ID,
			Number:      num + 1,
			Status:      model.StatusPending,
			Environment: axis,
		})
	}
	err = store.CreateBuild(c, build, jobs...)
	if err != nil {
		logrus.Errorf("failure to save commit for %s. %s", repo.FullName, err)
		c.AbortWithError(500, err)
		return
	}

	c.JSON(200, build)

	uri := fmt.Sprintf("%s/%s/%d", httputil.GetURL(c.Request), repo.FullName, build.Number)
	err = remote_.Status(user, repo, build, uri)
	if err != nil {
		logrus.Errorf("error setting commit status for %s/%d", repo.FullName, build.Number)
	}

	// get the previous build so that we can send
	// on status change notifications
	last, _ := store.GetBuildLastBefore(c, repo, build.Branch, build.ID)
	secs, err := store.GetMergedSecretList(c, repo)
	if err != nil {
		logrus.Debugf("Error getting secrets for %s#%d. %s", repo.FullName, build.Number, err)
	}

	//
	// new code here
	//

	message := pubsub.Message{
		Labels: map[string]string{
			"repo":    repo.FullName,
			"private": strconv.FormatBool(repo.IsPrivate),
		},
	}
	message.Data, _ = json.Marshal(model.Event{
		Type:  model.Enqueued,
		Repo:  *repo,
		Build: *build,
	})
	// TODO remove global reference
	config.pubsub.Publish(c, "topic/events", message)

	//
	// workspace
	//

	for _, job := range jobs {

		metadata := metadataFromStruct(repo, build, last, job, "linux/amd64")
		environ := metadata.Environ()

		// collect the secrets visible to this build: the event must match,
		// and unverified yaml only receives secrets marked SkipVerify.
		secrets := map[string]string{}
		for _, sec := range secs {
			if !sec.MatchEvent(build.Event) {
				continue
			}
			if build.Verified || sec.SkipVerify {
				secrets[sec.Name] = sec.Value
			}
		}

		// substitution source: environment variables first, secrets second.
		sub := func(name string) string {
			if v, ok := environ[name]; ok {
				return v
			}
			return secrets[name]
		}
		// BUGFIX: keep the substituted yaml only when evaluation succeeds.
		// The previous condition was inverted (err != nil), which discarded
		// every successful substitution and kept the raw yaml instead.
		if s, err := envsubst.Eval(string(raw), sub); err == nil {
			raw = []byte(s)
		}

		// a parse or lint failure marks this job failed rather than
		// aborting the whole request; other jobs still get queued.
		parsed, err := yaml.ParseBytes(raw)
		if err != nil {
			job.ExitCode = 255
			job.Enqueued = time.Now().Unix()
			job.Started = time.Now().Unix()
			job.Finished = time.Now().Unix()
			job.Error = err.Error()
			store.UpdateBuildJob(c, build, job)
			continue
		}

		lerr := linter.New(
			linter.WithTrusted(repo.IsTrusted),
		).Lint(parsed)
		if lerr != nil {
			job.ExitCode = 255
			job.Enqueued = time.Now().Unix()
			job.Started = time.Now().Unix()
			job.Finished = time.Now().Unix()
			job.Error = lerr.Error()
			store.UpdateBuildJob(c, build, job)
			continue
		}

		ir := compiler.New(
			compiler.WithEnviron(environ),
			compiler.WithEscalated("plugins/docker", "plugins/gcr", "plugins/ecr"),
			compiler.WithLocal(false),
			compiler.WithNetrc(netrc.Login, netrc.Password, netrc.Machine),
			compiler.WithPrefix(
				fmt.Sprintf(
					"%d_%d",
					job.ID,
					time.Now().Unix(),
				),
			),
			compiler.WithProxy(),
			compiler.WithVolumes(), // todo set global volumes
			compiler.WithWorkspaceFromURL("/drone", repo.Link),
		).Compile(parsed)

		task := new(queue.Task)
		task.ID = fmt.Sprint(job.ID)
		task.Labels = map[string]string{}
		task.Labels["platform"] = "linux/amd64"
		// TODO set proper platform
		// TODO set proper labels

		task.Data, _ = json.Marshal(rpc.Pipeline{
			ID:      fmt.Sprint(job.ID),
			Config:  ir,
			Timeout: repo.Timeout,
		})

		config.logger.Open(context.Background(), task.ID)
		config.queue.Push(context.Background(), task)
	}
}
// metadataFromStruct maps the database models for the repository, the
// current and previous builds, and the job into the frontend metadata
// consumed by the pipeline compiler. The link parameter is the system
// address reported in the Sys section.
func metadataFromStruct(repo *model.Repo, build, last *model.Build, job *model.Job, link string) frontend.Metadata {
	return frontend.Metadata{
		Repo: frontend.Repo{
			Name:    repo.Name,
			Link:    repo.Link,
			Remote:  repo.Clone,
			Private: repo.IsPrivate,
		},
		Curr: metadataBuild(build),
		Prev: metadataBuild(last),
		Job: frontend.Job{
			Number: job.Number,
			Matrix: job.Environment,
		},
		Sys: frontend.System{
			Name: "drone",
			Link: link,
			Arch: "linux/amd64",
		},
	}
}

// metadataBuild converts a build model into the frontend build
// representation shared by the Curr and Prev metadata fields.
func metadataBuild(b *model.Build) frontend.Build {
	return frontend.Build{
		Number:   b.Number,
		Created:  b.Created,
		Started:  b.Started,
		Finished: b.Finished,
		Status:   b.Status,
		Event:    b.Event,
		Link:     b.Link,
		Target:   b.Deploy,
		Commit: frontend.Commit{
			Sha:     b.Commit,
			Ref:     b.Ref,
			Refspec: b.Refspec,
			Branch:  b.Branch,
			Message: b.Message,
			Author: frontend.Author{
				Name:   b.Author,
				Email:  b.Email,
				Avatar: b.Avatar,
			},
		},
	}
}
// use helper function to return ([]backend.Config, error)

// builder collects everything needed to compile the pipeline
// configurations for a single build: the merged secrets, the
// repository, the current and previous builds, the jobs, and the
// system link.
type builder struct {
	secs []*model.Secret
	repo *model.Repo
	build *model.Build
	last *model.Build
	jobs []*model.Job
	link string
}

// Build is a placeholder for the future compilation step; it currently
// returns nil, nil and is not yet called anywhere in this file.
func (b *builder) Build() ([]*backend.Config, error) {
	return nil, nil
}

View file

@ -0,0 +1,8 @@
-- +migrate Up

ALTER TABLE builds ADD COLUMN build_error VARCHAR(500);

-- backfill existing rows with an empty string so the new column is
-- never NULL. The previous statement used `WHERE job_error = null`,
-- which matched no rows: comparing with NULL via `=` evaluates to
-- unknown in SQL (use IS NULL), and job_error is not a builds column.
-- The unconditional update matches the companion migrations for the
-- other database drivers.
UPDATE builds SET build_error = '';

-- +migrate Down

ALTER TABLE builds DROP COLUMN build_error;

View file

@ -0,0 +1,8 @@
-- +migrate Up

-- add a column to surface compiler and lint errors on the build,
-- backfilling existing rows with an empty string.
ALTER TABLE builds ADD COLUMN build_error VARCHAR(500);
UPDATE builds SET build_error = '';

-- +migrate Down

ALTER TABLE builds DROP COLUMN build_error;

View file

@ -0,0 +1,8 @@
-- +migrate Up

-- add a column to surface compiler and lint errors on the build,
-- backfilling existing rows with an empty string. TEXT is used here
-- where this driver has no bounded VARCHAR equivalent.
ALTER TABLE builds ADD COLUMN build_error TEXT;
UPDATE builds SET build_error = '';

-- +migrate Down

ALTER TABLE builds DROP COLUMN build_error;

View file

@ -1,7 +1,9 @@
package compiler
import (
"net/url"
"os"
"path/filepath"
"strings"
"github.com/cncd/pipeline/pipeline/frontend"
@ -56,6 +58,17 @@ func WithWorkspace(base, path string) Option {
}
}
// WithWorkspaceFromURL configures the compiler with the workspace
// base and path based on the repository url, e.g. a link of
// "https://github.com/octocat/hello" yields path
// "src/github.com/octocat/hello". When the link cannot be parsed the
// path falls back to "src".
func WithWorkspaceFromURL(base, link string) Option {
	workspace := "src"
	if parsed, err := url.Parse(link); err == nil {
		// NOTE(review): filepath.Join uses the host OS separator; if the
		// server may run on Windows while builds run in Linux containers,
		// path.Join may be intended here — confirm.
		workspace = filepath.Join(workspace, parsed.Host, parsed.Path)
	}
	return WithWorkspace(base, workspace)
}
// WithEscalated configures the compiler to automatically execute
// images as privileged containers if the match the given list.
func WithEscalated(images ...string) Option {

2
vendor/vendor.json vendored
View file

@ -61,7 +61,7 @@
"revisionTime": "2017-03-05T09:53:47Z"
},
{
"checksumSHA1": "e1lZWQdObXCKWqZOGlOeaeERQMc=",
"checksumSHA1": "+4c/I/PEDCgzog8m4ohw1parhgE=",
"path": "github.com/cncd/pipeline/pipeline/frontend/yaml/compiler",
"revision": "d4e09fd3021a16408bc3ebdd3500efd28f51e72c",
"revisionTime": "2017-03-05T09:53:47Z"