Mirror of https://github.com/woodpecker-ci/woodpecker.git (synced 2025-02-05 14:12:22 +00:00)
Rework log streaming and related functions (#1802)
closes #1801
closes #1815
closes #1144
closes #983
closes #557
closes #1827
regression of #1791

# TODO

- [x] adjust log model
- [x] add migration for logs
- [x] send log line via grpc using step-id
- [x] save log-line to db
- [x] stream log-lines to UI
- [x] use less structs for log-data
- [x] make web UI work
- [x] display logs loaded from db
- [x] display streaming logs
- [ ] ~~make migration work~~ -> dedicated pull (#1828)

# TESTED

- [x] new logs are stored in database
- [x] log retrieval via cli (of new logs) works
- [x] log streaming works (tested via curl & webui)
- [x] log retrieval via web (of new logs) works

---------

Co-authored-by: 6543 <6543@obermui.de>
This commit is contained in:
parent 971cb52032
commit 556607b525

49 changed files with 1066 additions and 990 deletions
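For orientation, a minimal sketch (not part of this commit) of how the reworked agent-side pieces fit together: step output is wrapped in an `rpc.LineWriter` keyed by the step UUID, and every written chunk reaches the server as one `rpc.LogEntry` via `Peer.Log`. The `stubPeer` type below is illustrative only; in the real agent the peer is the gRPC client changed further down in this diff.

```go
package main

import (
	"context"
	"fmt"
	"io"
	"strings"

	"github.com/woodpecker-ci/woodpecker/pipeline/rpc"
)

// stubPeer stands in for the agent's real gRPC client. Embedding rpc.Peer
// keeps the stub small while still satisfying the interface; only Log is
// actually called here.
type stubPeer struct{ rpc.Peer }

func (stubPeer) Log(_ context.Context, entry *rpc.LogEntry) error {
	fmt.Printf("step=%s line=%d type=%d data=%q\n", entry.StepUUID, entry.Line, entry.Type, entry.Data)
	return nil
}

func main() {
	// The writer is now keyed by the step UUID (plus secrets to mask),
	// instead of the old (work ID, step alias) pair.
	logStream := rpc.NewLineWriter(stubPeer{}, "e9ea76a5-44a1-4059-9c4a-6956c478b26d", "s3cr3t")

	// Step output is copied straight into the writer; each Write becomes one
	// LogEntry sent to the peer, with the secret masked before sending.
	_, _ = io.Copy(logStream, strings.NewReader("connecting with s3cr3t\n"))
}
```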
@@ -50,9 +50,8 @@ func (r *Runner) createLogger(_ context.Context, logger zerolog.Logger, uploads
 	logger.Debug().Msg("log stream opened")
 
-	limitedPart := io.LimitReader(part, maxLogsUpload)
-	logStream := rpc.NewLineWriter(r.client, work.ID, step.Alias, secrets...)
-	if _, err := io.Copy(logStream, limitedPart); err != nil {
+	logStream := rpc.NewLineWriter(r.client, step.UUID, secrets...)
+	if _, err := io.Copy(logStream, part); err != nil {
 		log.Error().Err(err).Msg("copy limited logStream part")
 	}
@@ -278,14 +278,14 @@ func (c *client) Update(ctx context.Context, id string, state rpc.State) (err er
 }
 
 // Log writes the pipeline log entry.
-func (c *client) Log(ctx context.Context, id string, line *rpc.Line) (err error) {
+func (c *client) Log(ctx context.Context, logEntry *rpc.LogEntry) (err error) {
 	req := new(proto.LogRequest)
-	req.Id = id
-	req.Line = new(proto.Line)
-	req.Line.Out = line.Out
-	req.Line.Pos = int32(line.Pos)
-	req.Line.Step = line.Step
-	req.Line.Time = line.Time
+	req.LogEntry = new(proto.LogEntry)
+	req.LogEntry.StepUuid = logEntry.StepUUID
+	req.LogEntry.Data = logEntry.Data
+	req.LogEntry.Line = int32(logEntry.Line)
+	req.LogEntry.Time = logEntry.Time
+	req.LogEntry.Type = int32(logEntry.Type)
 	for {
 		_, err = c.client.Log(ctx, req)
 		if err == nil {
@@ -31,14 +31,6 @@ import (
 	"github.com/woodpecker-ci/woodpecker/shared/utils"
 )
 
-// TODO: Implement log streaming.
-// Until now we need to limit the size of the logs and files that we upload.
-// The maximum grpc payload size is 4194304. So we need to set these limits below the maximum.
-const (
-	maxLogsUpload = 2000000 // this is per step
-	maxFileUpload = 1000000
-)
-
 type Runner struct {
 	client rpc.Peer
 	filter rpc.Filter
@@ -248,7 +248,7 @@ var defaultLogger = pipeline.LogFunc(func(step *backendTypes.Step, rc multipart.
 		return err
 	}
 
-	logStream := NewLineWriter(step.Alias)
+	logStream := NewLineWriter(step.Alias, step.UUID)
 	_, err = io.Copy(logStream, part)
 	return err
 })
@@ -19,60 +19,44 @@ import (
 	"os"
 	"strings"
 	"time"
-)
 
-// Identifies the type of line in the logs.
-const (
-	LineStdout int = iota
-	LineStderr
-	LineExitCode
-	LineMetadata
-	LineProgress
+	"github.com/woodpecker-ci/woodpecker/pipeline/rpc"
 )
 
-// Line is a line of console output.
-type Line struct {
-	Step string `json:"step,omitempty"`
-	Time int64  `json:"time,omitempty"`
-	Type int    `json:"type,omitempty"`
-	Pos  int    `json:"pos,omitempty"`
-	Out  string `json:"out,omitempty"`
-}
-
 // LineWriter sends logs to the client.
 type LineWriter struct {
-	name  string
-	num   int
-	now   time.Time
-	rep   *strings.Replacer
-	lines []*Line
+	stepName string
+	stepUUID string
+	num      int
+	now      time.Time
+	rep      *strings.Replacer
+	lines    []*rpc.LogEntry
 }
 
 // NewLineWriter returns a new line reader.
-func NewLineWriter(name string) *LineWriter {
-	w := new(LineWriter)
-	w.name = name
-	w.num = 0
-	w.now = time.Now().UTC()
-
-	return w
+func NewLineWriter(stepName, stepUUID string) *LineWriter {
+	return &LineWriter{
+		stepName: stepName,
+		stepUUID: stepUUID,
+		now:      time.Now().UTC(),
+	}
 }
 
 func (w *LineWriter) Write(p []byte) (n int, err error) {
-	out := string(p)
+	data := string(p)
 	if w.rep != nil {
-		out = w.rep.Replace(out)
+		data = w.rep.Replace(data)
 	}
 
-	line := &Line{
-		Out:  out,
-		Step: w.name,
-		Pos:  w.num,
-		Time: int64(time.Since(w.now).Seconds()),
-		Type: LineStdout,
+	line := &rpc.LogEntry{
+		Data:     data,
+		StepUUID: w.stepUUID,
+		Line:     w.num,
+		Time:     int64(time.Since(w.now).Seconds()),
+		Type:     rpc.LogEntryStdout,
 	}
 
-	fmt.Fprintf(os.Stderr, "[%s:L%d:%ds] %s", w.name, w.num, int64(time.Since(w.now).Seconds()), out)
+	fmt.Fprintf(os.Stderr, "[%s:L%d:%ds] %s", w.stepName, w.num, int64(time.Since(w.now).Seconds()), data)
 
 	w.num++
@@ -27,7 +27,7 @@ import (
 var pipelineLogsCmd = &cli.Command{
 	Name:      "logs",
 	Usage:     "show pipeline logs",
-	ArgsUsage: "<repo/name> [pipeline] [step]",
+	ArgsUsage: "<repo/name> [pipeline] [stepID]",
 	Action:    pipelineLogs,
 	Flags:     common.GlobalFlags,
 }
@@ -54,13 +54,13 @@ func pipelineLogs(c *cli.Context) error {
 		return err
 	}
 
-	logs, err := client.PipelineLogs(owner, name, number, step)
+	logs, err := client.StepLogEntries(owner, name, number, step)
 	if err != nil {
 		return err
 	}
 
 	for _, log := range logs {
-		fmt.Print(log.Output)
+		fmt.Print(string(log.Data))
 	}
 
 	return nil
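The CLI change above maps directly onto the Go API client. A hedged sketch of the same call outside the CLI, assuming the usual `woodpecker-go` constructor `woodpecker.NewClient(serverURL, httpClient)`; server URL, repo, pipeline number and step ID are placeholders:

```go
package main

import (
	"fmt"
	"log"
	"net/http"

	"github.com/woodpecker-ci/woodpecker/woodpecker-go/woodpecker"
)

func main() {
	// In practice this would be an oauth2 http.Client carrying the
	// personal access token; a bare client is used here for brevity.
	httpClient := &http.Client{}
	client := woodpecker.NewClient("https://ci.example.com", httpClient)

	// StepLogEntries replaces the old PipelineLogs call: it returns the
	// stored log entries of a single step, identified by its database ID.
	entries, err := client.StepLogEntries("octocat", "hello-world", 42, 7)
	if err != nil {
		log.Fatal(err)
	}
	for _, entry := range entries {
		fmt.Print(string(entry.Data))
	}
}
```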
@@ -751,6 +751,52 @@ const docTemplate = `{
                 }
             }
         },
+        "/logs/{owner}/{name}/{pipeline}/{stepID}": {
+            "get": {
+                "produces": [
+                    "text/plain"
+                ],
+                "tags": [
+                    "Pipeline logs"
+                ],
+                "summary": "Log stream",
+                "parameters": [
+                    {
+                        "type": "string",
+                        "description": "the repository owner's name",
+                        "name": "owner",
+                        "in": "path",
+                        "required": true
+                    },
+                    {
+                        "type": "string",
+                        "description": "the repository name",
+                        "name": "name",
+                        "in": "path",
+                        "required": true
+                    },
+                    {
+                        "type": "integer",
+                        "description": "the number of the pipeline",
+                        "name": "pipeline",
+                        "in": "path",
+                        "required": true
+                    },
+                    {
+                        "type": "integer",
+                        "description": "the step id",
+                        "name": "stepID",
+                        "in": "path",
+                        "required": true
+                    }
+                ],
+                "responses": {
+                    "200": {
+                        "description": "OK"
+                    }
+                }
+            }
+        },
         "/orgs/{owner}/permissions": {
             "get": {
                 "produces": [
@@ -1795,10 +1841,10 @@ const docTemplate = `{
                 }
             }
         },
-        "/repos/{owner}/{name}/logs/{number}/{pid}": {
+        "/repos/{owner}/{name}/logs/{number}/{stepID}": {
             "get": {
                 "produces": [
-                    "text/plain"
+                    "application/json"
                 ],
                 "tags": [
                     "Pipeline logs"
@@ -1836,76 +1882,21 @@ const docTemplate = `{
                     },
                     {
                         "type": "integer",
-                        "description": "the pipeline id",
-                        "name": "pid",
+                        "description": "the step id",
+                        "name": "stepID",
                         "in": "path",
                         "required": true
                     }
                 ],
                 "responses": {
                     "200": {
-                        "description": "OK"
-                    }
-                }
-            }
-        },
-        "/repos/{owner}/{name}/logs/{number}/{pid}/{step}": {
-            "get": {
-                "produces": [
-                    "text/plain"
-                ],
-                "tags": [
-                    "Pipeline logs"
-                ],
-                "summary": "Log information per step",
-                "parameters": [
-                    {
-                        "type": "string",
-                        "default": "Bearer \u003cpersonal access token\u003e",
-                        "description": "Insert your personal access token",
-                        "name": "Authorization",
-                        "in": "header",
-                        "required": true
-                    },
-                    {
-                        "type": "string",
-                        "description": "the repository owner's name",
-                        "name": "owner",
-                        "in": "path",
-                        "required": true
-                    },
-                    {
-                        "type": "string",
-                        "description": "the repository name",
-                        "name": "name",
-                        "in": "path",
-                        "required": true
-                    },
-                    {
-                        "type": "integer",
-                        "description": "the number of the pipeline",
-                        "name": "number",
-                        "in": "path",
-                        "required": true
-                    },
-                    {
-                        "type": "integer",
-                        "description": "the pipeline id",
-                        "name": "pid",
-                        "in": "path",
-                        "required": true
-                    },
-                    {
-                        "type": "integer",
-                        "description": "the step name",
-                        "name": "step",
-                        "in": "path",
-                        "required": true
-                    }
-                ],
-                "responses": {
-                    "200": {
-                        "description": "OK"
+                        "description": "OK",
+                        "schema": {
+                            "type": "array",
+                            "items": {
+                                "$ref": "#/definitions/LogEntry"
+                            }
+                        }
                     }
                 }
             }
@@ -3800,6 +3791,32 @@ const docTemplate = `{
                 }
             }
         },
+        "LogEntry": {
+            "type": "object",
+            "properties": {
+                "data": {
+                    "type": "array",
+                    "items": {
+                        "type": "integer"
+                    }
+                },
+                "id": {
+                    "type": "integer"
+                },
+                "line": {
+                    "type": "integer"
+                },
+                "step_id": {
+                    "type": "integer"
+                },
+                "time": {
+                    "type": "integer"
+                },
+                "type": {
+                    "$ref": "#/definitions/model.LogEntryType"
+                }
+            }
+        },
         "OrgPerm": {
             "type": "object",
             "properties": {
@@ -4233,6 +4250,9 @@ const docTemplate = `{
             },
             "state": {
                 "$ref": "#/definitions/StatusValue"
+            },
+            "uuid": {
+                "type": "string"
             }
         }
     },
@@ -4320,6 +4340,23 @@ const docTemplate = `{
                 "EventCron",
                 "EventManual"
             ]
-        }
+        },
+        "model.LogEntryType": {
+            "type": "integer",
+            "enum": [
+                0,
+                1,
+                2,
+                3,
+                4
+            ],
+            "x-enum-varnames": [
+                "LogEntryStdout",
+                "LogEntryStderr",
+                "LogEntryExitCode",
+                "LogEntryMetadata",
+                "LogEntryProgress"
+            ]
+        }
     }
 }`
go.mod — 2 changes
@@ -23,6 +23,7 @@ require (
 	github.com/golang-jwt/jwt/v4 v4.5.0
 	github.com/google/go-github/v39 v39.2.0
 	github.com/google/tink/go v1.7.0
+	github.com/google/uuid v1.3.0
 	github.com/gorilla/securecookie v1.1.1
 	github.com/joho/godotenv v1.5.1
 	github.com/lafriks/ttlcache/v3 v3.2.0
@@ -94,7 +95,6 @@ require (
 	github.com/google/go-cmp v0.5.9 // indirect
 	github.com/google/go-querystring v1.1.0 // indirect
 	github.com/google/gofuzz v1.1.0 // indirect
-	github.com/google/uuid v1.3.0 // indirect
 	github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
 	github.com/hashicorp/go-hclog v1.2.0 // indirect
 	github.com/hashicorp/go-retryablehttp v0.7.2 // indirect
@@ -30,6 +30,7 @@ import (
 func toConfig(step *types.Step) *container.Config {
 	config := &container.Config{
 		Image:        step.Image,
+		Labels:       map[string]string{"wp_uuid": step.UUID},
 		WorkingDir:   step.WorkingDir,
 		AttachStdout: true,
 		AttachStderr: true,
@@ -3,6 +3,7 @@ package types
 // Step defines a container process.
 type Step struct {
 	Name       string `json:"name"`
+	UUID       string `json:"uuid"`
 	Alias      string `json:"alias,omitempty"`
 	Image      string `json:"image,omitempty"`
 	Pull       bool   `json:"pull,omitempty"`
@@ -6,6 +6,7 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/google/uuid"
 	"github.com/rs/zerolog/log"
 
 	backend_types "github.com/woodpecker-ci/woodpecker/pipeline/backend/types"
@@ -16,6 +17,8 @@ import (
 
 func (c *Compiler) createProcess(name string, container *yaml_types.Container, section string) *backend_types.Step {
 	var (
+		uuid = uuid.New()
+
 		detached   bool
 		workingdir string
@@ -153,6 +156,7 @@ func (c *Compiler) createProcess(name string, container *yaml_types.Container, s
 
 	return &backend_types.Step{
 		Name:      name,
+		UUID:      uuid.String(),
 		Alias:     container.Name,
 		Image:     container.Image,
 		Pull:      container.Pull,
@@ -1,121 +0,0 @@
-// Copyright 2022 Woodpecker Authors
-// Copyright 2011 Drone.IO Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package rpc
-
-import (
-	"context"
-	"fmt"
-	"strings"
-	"time"
-
-	"github.com/rs/zerolog/log"
-
-	"github.com/woodpecker-ci/woodpecker/pipeline/shared"
-)
-
-// Identifies the type of line in the logs.
-const (
-	LineStdout int = iota
-	LineStderr
-	LineExitCode
-	LineMetadata
-	LineProgress
-)
-
-// Line is a line of console output.
-type Line struct {
-	Step string `json:"step,omitempty"`
-	Time int64  `json:"time,omitempty"`
-	Type int    `json:"type,omitempty"`
-	Pos  int    `json:"pos,omitempty"`
-	Out  string `json:"out,omitempty"`
-}
-
-func (l *Line) String() string {
-	switch l.Type {
-	case LineExitCode:
-		return fmt.Sprintf("[%s] exit code %s", l.Step, l.Out)
-	default:
-		return fmt.Sprintf("[%s:L%v:%vs] %s", l.Step, l.Pos, l.Time, l.Out)
-	}
-}
-
-// LineWriter sends logs to the client.
-type LineWriter struct {
-	peer  Peer
-	id    string
-	name  string
-	num   int
-	now   time.Time
-	rep   *strings.Replacer
-	lines []*Line
-}
-
-// NewLineWriter returns a new line reader.
-func NewLineWriter(peer Peer, id, name string, secret ...string) *LineWriter {
-	return &LineWriter{
-		peer:  peer,
-		id:    id,
-		name:  name,
-		now:   time.Now().UTC(),
-		rep:   shared.NewSecretsReplacer(secret),
-		lines: nil,
-	}
-}
-
-func (w *LineWriter) Write(p []byte) (n int, err error) {
-	out := string(p)
-	if w.rep != nil {
-		out = w.rep.Replace(out)
-	}
-	log.Trace().Str("name", w.name).Str("ID", w.id).Msgf("grpc write line: %s", out)
-
-	line := &Line{
-		Out:  out,
-		Step: w.name,
-		Pos:  w.num,
-		Time: int64(time.Since(w.now).Seconds()),
-		Type: LineStdout,
-	}
-	if err := w.peer.Log(context.Background(), w.id, line); err != nil {
-		log.Error().Err(err).Msgf("fail to write pipeline log to peer '%s'", w.id)
-	}
-	w.num++
-
-	// for _, part := range bytes.Split(p, []byte{'\n'}) {
-	// 	line := &Line{
-	// 		Out:  string(part),
-	// 		Step: w.name,
-	// 		Pos:  w.num,
-	// 		Time: int64(time.Since(w.now).Seconds()),
-	// 		Type: LineStdout,
-	// 	}
-	// 	w.peer.Log(context.Background(), w.id, line)
-	// 	w.num++
-	// }
-	w.lines = append(w.lines, line)
-	return len(p), nil
-}
-
-// Lines returns the line history
-func (w *LineWriter) Lines() []*Line {
-	return w.lines
-}
-
-// Clear clears the line history
-func (w *LineWriter) Clear() {
-	w.lines = w.lines[:0]
-}
pipeline/rpc/log_entry.go — new file (108 lines)
@@ -0,0 +1,108 @@
+// Copyright 2022 Woodpecker Authors
+// Copyright 2011 Drone.IO Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package rpc
+
+import (
+	"context"
+	"fmt"
+	"strings"
+	"time"
+
+	"github.com/rs/zerolog/log"
+
+	"github.com/woodpecker-ci/woodpecker/pipeline/shared"
+)
+
+// Identifies the type of line in the logs.
+const (
+	LogEntryStdout int = iota
+	LogEntryStderr
+	LogEntryExitCode
+	LogEntryMetadata
+	LogEntryProgress
+)
+
+// Line is a line of console output.
+type LogEntry struct {
+	StepUUID string `json:"step_uuid,omitempty"`
+	Time     int64  `json:"time,omitempty"`
+	Type     int    `json:"type,omitempty"`
+	Line     int    `json:"line,omitempty"`
+	Data     string `json:"data,omitempty"`
+}
+
+func (l *LogEntry) String() string {
+	switch l.Type {
+	case LogEntryExitCode:
+		return fmt.Sprintf("[%s] exit code %s", l.StepUUID, l.Data)
+	default:
+		return fmt.Sprintf("[%s:L%v:%vs] %s", l.StepUUID, l.Line, l.Time, l.Data)
+	}
+}
+
+// LineWriter sends logs to the client.
+type LineWriter struct {
+	peer     Peer
+	stepUUID string
+	num      int
+	now      time.Time
+	rep      *strings.Replacer
+	lines    []*LogEntry
+}
+
+// NewLineWriter returns a new line reader.
+func NewLineWriter(peer Peer, stepUUID string, secret ...string) *LineWriter {
+	return &LineWriter{
+		peer:     peer,
+		stepUUID: stepUUID,
+		now:      time.Now().UTC(),
+		rep:      shared.NewSecretsReplacer(secret),
+		lines:    nil,
+	}
+}
+
+func (w *LineWriter) Write(p []byte) (n int, err error) {
+	data := string(p)
+	if w.rep != nil {
+		data = w.rep.Replace(data)
+	}
+	log.Trace().Str("step-uuid", w.stepUUID).Msgf("grpc write line: %s", data)
+
+	line := &LogEntry{
+		Data:     data,
+		StepUUID: w.stepUUID,
+		Time:     int64(time.Since(w.now).Seconds()),
+		Type:     LogEntryStdout,
+		Line:     w.num,
+	}
+	if err := w.peer.Log(context.Background(), line); err != nil {
+		log.Error().Err(err).Str("step-uuid", w.stepUUID).Msg("fail to write pipeline log to peer")
+	}
+	w.num++
+
+	w.lines = append(w.lines, line)
+	return len(p), nil
+}
+
+// Lines returns the line history
+func (w *LineWriter) Lines() []*LogEntry {
+	return w.lines
+}
+
+// Clear clears the line history
+func (w *LineWriter) Clear() {
+	w.lines = w.lines[:0]
+}
@@ -18,14 +18,14 @@ import (
 	"testing"
 )
 
-func TestLine(t *testing.T) {
-	line := Line{
-		Step: "redis",
-		Time: 60,
-		Pos:  1,
-		Out:  "starting redis server",
+func TestLogEntry(t *testing.T) {
+	line := LogEntry{
+		StepUUID: "e9ea76a5-44a1-4059-9c4a-6956c478b26d",
+		Time:     60,
+		Line:     1,
+		Data:     "starting redis server",
 	}
-	got, want := line.String(), "[redis:L1:60s] starting redis server"
+	got, want := line.String(), "[e9ea76a5-44a1-4059-9c4a-6956c478b26d:L1:60s] starting redis server"
 	if got != want {
 		t.Errorf("Wanted line string %q, got %q", want, got)
 	}
@@ -74,7 +74,7 @@ type Peer interface {
 	Update(c context.Context, id string, state State) error
 
 	// Log writes the pipeline log entry.
-	Log(c context.Context, id string, line *Line) error
+	Log(c context.Context, logEntry *LogEntry) error
 
 	// RegisterAgent register our agent to the server
 	RegisterAgent(ctx context.Context, platform, backend, version string, capacity int) (int64, error)
@@ -16,4 +16,4 @@ package proto
 
 // Version is the version of the woodpecker.proto file,
 // !IMPORTANT! increased by 1 each time it get changed !IMPORTANT!
-const Version int32 = 2
+const Version int32 = 3
@@ -122,19 +122,20 @@ func (x *State) GetError() string {
 	return ""
 }
 
-type Line struct {
+type LogEntry struct {
 	state         protoimpl.MessageState
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	Step string `protobuf:"bytes,1,opt,name=step,proto3" json:"step,omitempty"`
-	Time int64  `protobuf:"varint,2,opt,name=time,proto3" json:"time,omitempty"`
-	Pos  int32  `protobuf:"varint,3,opt,name=pos,proto3" json:"pos,omitempty"`
-	Out  string `protobuf:"bytes,4,opt,name=out,proto3" json:"out,omitempty"`
+	StepUuid string `protobuf:"bytes,1,opt,name=step_uuid,json=stepUuid,proto3" json:"step_uuid,omitempty"`
+	Time     int64  `protobuf:"varint,2,opt,name=time,proto3" json:"time,omitempty"`
+	Line     int32  `protobuf:"varint,3,opt,name=line,proto3" json:"line,omitempty"`
+	Type     int32  `protobuf:"varint,4,opt,name=type,proto3" json:"type,omitempty"` // 0 = stdout, 1 = stderr, 2 = exit-code, 3 = metadata, 4 = progress
+	Data     string `protobuf:"bytes,5,opt,name=data,proto3" json:"data,omitempty"`
 }
 
-func (x *Line) Reset() {
-	*x = Line{}
+func (x *LogEntry) Reset() {
+	*x = LogEntry{}
 	if protoimpl.UnsafeEnabled {
 		mi := &file_woodpecker_proto_msgTypes[1]
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -142,13 +143,13 @@ func (x *Line) Reset() {
 	}
 }
 
-func (x *Line) String() string {
+func (x *LogEntry) String() string {
 	return protoimpl.X.MessageStringOf(x)
 }
 
-func (*Line) ProtoMessage() {}
+func (*LogEntry) ProtoMessage() {}
 
-func (x *Line) ProtoReflect() protoreflect.Message {
+func (x *LogEntry) ProtoReflect() protoreflect.Message {
 	mi := &file_woodpecker_proto_msgTypes[1]
 	if protoimpl.UnsafeEnabled && x != nil {
 		ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
@@ -160,35 +161,42 @@ func (x *Line) ProtoReflect() protoreflect.Message {
 	return mi.MessageOf(x)
 }
 
-// Deprecated: Use Line.ProtoReflect.Descriptor instead.
-func (*Line) Descriptor() ([]byte, []int) {
+// Deprecated: Use LogEntry.ProtoReflect.Descriptor instead.
+func (*LogEntry) Descriptor() ([]byte, []int) {
 	return file_woodpecker_proto_rawDescGZIP(), []int{1}
 }
 
-func (x *Line) GetStep() string {
+func (x *LogEntry) GetStepUuid() string {
 	if x != nil {
-		return x.Step
+		return x.StepUuid
 	}
 	return ""
 }
 
-func (x *Line) GetTime() int64 {
+func (x *LogEntry) GetTime() int64 {
 	if x != nil {
 		return x.Time
 	}
 	return 0
 }
 
-func (x *Line) GetPos() int32 {
+func (x *LogEntry) GetLine() int32 {
 	if x != nil {
-		return x.Pos
+		return x.Line
 	}
 	return 0
 }
 
-func (x *Line) GetOut() string {
+func (x *LogEntry) GetType() int32 {
 	if x != nil {
-		return x.Out
+		return x.Type
+	}
+	return 0
+}
+
+func (x *LogEntry) GetData() string {
+	if x != nil {
+		return x.Data
 	}
 	return ""
 }
@@ -614,8 +622,7 @@ type LogRequest struct {
 	sizeCache     protoimpl.SizeCache
 	unknownFields protoimpl.UnknownFields
 
-	Id   string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"`
-	Line *Line  `protobuf:"bytes,2,opt,name=line,proto3" json:"line,omitempty"`
+	LogEntry *LogEntry `protobuf:"bytes,1,opt,name=logEntry,proto3" json:"logEntry,omitempty"`
 }
 
 func (x *LogRequest) Reset() {
|
@ -650,16 +657,9 @@ func (*LogRequest) Descriptor() ([]byte, []int) {
|
|||
return file_woodpecker_proto_rawDescGZIP(), []int{10}
|
||||
}
|
||||
|
||||
func (x *LogRequest) GetId() string {
|
||||
func (x *LogRequest) GetLogEntry() *LogEntry {
|
||||
if x != nil {
|
||||
return x.Id
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (x *LogRequest) GetLine() *Line {
|
||||
if x != nil {
|
||||
return x.Line
|
||||
return x.LogEntry
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@@ -1101,126 +1101,128 @@ var file_woodpecker_proto_rawDesc = []byte{
	[… regenerated raw descriptor bytes for woodpecker.proto: the serialized descriptor now encodes a LogEntry message (step_uuid, time, line, type, data) in place of the old Line message, and a LogRequest that carries a single logEntry field; the remaining generated hex byte literals are unchanged and elided here …]
 }
 
 var (
@@ -1238,7 +1240,7 @@ func file_woodpecker_proto_rawDescGZIP() []byte {
 var file_woodpecker_proto_msgTypes = make([]protoimpl.MessageInfo, 20)
 var file_woodpecker_proto_goTypes = []interface{}{
 	(*State)(nil),       // 0: proto.State
-	(*Line)(nil),        // 1: proto.Line
+	(*LogEntry)(nil),    // 1: proto.LogEntry
 	(*Filter)(nil),      // 2: proto.Filter
 	(*Pipeline)(nil),    // 3: proto.Pipeline
 	(*NextRequest)(nil), // 4: proto.NextRequest
@@ -1264,7 +1266,7 @@ var file_woodpecker_proto_depIdxs = []int32{
 	0,  // 2: proto.InitRequest.state:type_name -> proto.State
 	0,  // 3: proto.DoneRequest.state:type_name -> proto.State
 	0,  // 4: proto.UpdateRequest.state:type_name -> proto.State
-	1,  // 5: proto.LogRequest.line:type_name -> proto.Line
+	1,  // 5: proto.LogRequest.logEntry:type_name -> proto.LogEntry
 	3,  // 6: proto.NextResponse.pipeline:type_name -> proto.Pipeline
 	11, // 7: proto.Woodpecker.Version:input_type -> proto.Empty
 	4,  // 8: proto.Woodpecker.Next:input_type -> proto.NextRequest
@@ -1314,7 +1316,7 @@ func file_woodpecker_proto_init() {
 			}
 		}
 		file_woodpecker_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
-			switch v := v.(*Line); i {
+			switch v := v.(*LogEntry); i {
 			case 0:
 				return &v.state
 			case 1:
@@ -49,11 +49,12 @@ message State {
   string error = 6;
 }
 
-message Line {
-  string step = 1;
+message LogEntry {
+  string step_uuid = 1;
   int64 time = 2;
-  int32 pos = 3;
-  string out = 4;
+  int32 line = 3;
+  int32 type = 4; // 0 = stdout, 1 = stderr, 2 = exit-code, 3 = metadata, 4 = progress
+  string data = 5;
 }
 
 message Filter {
@@ -98,8 +99,7 @@ message UpdateRequest {
 }
 
 message LogRequest {
-  string id = 1;
-  Line line = 2;
+  LogEntry logEntry = 1;
 }
 
 message Empty {
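On the wire, a log line is now a single `LogRequest` carrying one `LogEntry`. A hedged sketch of sending one entry through the generated stub, assuming the conventional protoc-generated constructor `proto.NewWoodpeckerClient`; the dial target is a placeholder and the real agent's auth interceptors are omitted:

```go
package main

import (
	"context"
	"log"

	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"

	"github.com/woodpecker-ci/woodpecker/pipeline/rpc/proto"
)

func main() {
	conn, err := grpc.Dial("woodpecker-server:9000", grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := proto.NewWoodpeckerClient(conn)

	// One log line in the new wire format: addressed by step UUID and
	// carrying line number, type and payload, instead of the old id + Line pair.
	req := &proto.LogRequest{
		LogEntry: &proto.LogEntry{
			StepUuid: "e9ea76a5-44a1-4059-9c4a-6956c478b26d",
			Time:     3,
			Line:     1,
			Type:     0, // stdout
			Data:     "starting redis server\n",
		},
	}
	if _, err := client.Log(context.Background(), req); err != nil {
		log.Fatal(err)
	}
}
```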
@@ -20,6 +20,7 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/google/uuid"
 	"github.com/oklog/ulid/v2"
 	"github.com/rs/zerolog/log"
 
@@ -79,6 +80,7 @@ func (b *StepBuilder) Build() ([]*Item, error) {
 
 	for _, axis := range axes {
 		workflow := &model.Step{
+			UUID:       uuid.New().String(), // TODO(#1784): Remove once workflows are a separate entity in database
 			PipelineID: b.Curr.ID,
 			PID:        pidSequence,
 			PGID:       pidSequence,
@@ -277,6 +279,9 @@ func (b *StepBuilder) toInternalRepresentation(parsed *yaml_types.Workflow, envi
 	).Compile(parsed)
 }
 
+// SetPipelineStepsOnPipeline is the link between pipeline representation in "pipeline package" and server
+// to be specific this func currently is used to convert the pipeline.Item list (crafted by StepBuilder.Build()) into
+// a pipeline that can be stored in the database by the server
 func SetPipelineStepsOnPipeline(pipeline *model.Pipeline, pipelineItems []*Item) *model.Pipeline {
 	var pidSequence int
 	for _, item := range pipelineItems {
@@ -295,8 +300,9 @@ func SetPipelineStepsOnPipeline(pipeline *model.Pipeline, pipelineItems []*Item)
 			gid = pidSequence
 		}
 		step := &model.Step{
-			PipelineID: pipeline.ID,
 			Name:       step.Alias,
+			UUID:       step.UUID,
+			PipelineID: pipeline.ID,
 			PID:        pidSequence,
 			PPID:       item.Workflow.PID,
 			PGID:       gid,
@@ -19,17 +19,14 @@
 package api
 
 import (
-	"bytes"
 	"encoding/json"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 	"strconv"
-	"time"
 
 	"github.com/gin-gonic/gin"
 	"github.com/rs/zerolog/log"
 
 	"github.com/woodpecker-ci/woodpecker/server"
 	"github.com/woodpecker-ci/woodpecker/server/model"
@@ -190,75 +187,29 @@ func GetPipelineLast(c *gin.Context) {
 	c.JSON(http.StatusOK, pl)
 }
 
-// GetPipelineLogs
-//
-// @Summary Log information per step
-// @Router /repos/{owner}/{name}/logs/{number}/{pid}/{step} [get]
-// @Produce plain
-// @Success 200
-// @Tags Pipeline logs
-// @Param Authorization header string true "Insert your personal access token" default(Bearer <personal access token>)
-// @Param owner path string true "the repository owner's name"
-// @Param name path string true "the repository name"
-// @Param number path int true "the number of the pipeline"
-// @Param pid path int true "the pipeline id"
-// @Param step path int true "the step name"
-func GetPipelineLogs(c *gin.Context) {
-	_store := store.FromContext(c)
-	repo := session.Repo(c)
-
-	// parse the pipeline number and step sequence number from
-	// the request parameter.
-	num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
-	ppid, _ := strconv.Atoi(c.Params.ByName("pid"))
-	name := c.Params.ByName("step")
-
-	pl, err := _store.GetPipelineNumber(repo, num)
-	if err != nil {
-		handleDbGetError(c, err)
-		return
-	}
-
-	step, err := _store.StepChild(pl, ppid, name)
-	if err != nil {
-		handleDbGetError(c, err)
-		return
-	}
-
-	rc, err := _store.LogFind(step)
-	if err != nil {
-		handleDbGetError(c, err)
-		return
-	}
-
-	defer rc.Close()
-
-	c.Header("Content-Type", "application/json")
-	if _, err := io.Copy(c.Writer, rc); err != nil {
-		log.Error().Err(err).Msg("could not copy log to http response")
-	}
-}
-
 // GetStepLogs
 //
 // @Summary Log information
-// @Router /repos/{owner}/{name}/logs/{number}/{pid} [get]
-// @Produce plain
-// @Success 200
-// @Tags Pipeline logs
+// @Router /repos/{owner}/{name}/logs/{number}/{stepID} [get]
+// @Produce json
+// @Success 200 {array} LogEntry
+// @Tags Pipeline logs
 // @Param Authorization header string true "Insert your personal access token" default(Bearer <personal access token>)
 // @Param owner path string true "the repository owner's name"
 // @Param name path string true "the repository name"
-// @Param number path int true "the number of the pipeline"
-// @Param pid path int true "the pipeline id"
+// @Param number path int true "the number of the pipeline"
+// @Param stepID path int true "the step id"
 func GetStepLogs(c *gin.Context) {
 	_store := store.FromContext(c)
 	repo := session.Repo(c)
 
-	// parse the pipeline number and step sequence number from
-	// the request parameter.
-	num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
-	pid, _ := strconv.Atoi(c.Params.ByName("pid"))
+	num, err := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
+	if err != nil {
+		_ = c.AbortWithError(http.StatusBadRequest, err)
+		return
+	}
 
 	pl, err := _store.GetPipelineNumber(repo, num)
 	if err != nil {
@ -266,24 +217,31 @@ func GetStepLogs(c *gin.Context) {
|
|||
return
|
||||
}
|
||||
|
||||
step, err := _store.StepFind(pl, pid)
|
||||
stepID, err := strconv.ParseInt(c.Params.ByName("stepId"), 10, 64)
|
||||
if err != nil {
|
||||
_ = c.AbortWithError(http.StatusBadRequest, err)
|
||||
return
|
||||
}
|
||||
|
||||
step, err := _store.StepLoad(stepID)
|
||||
if err != nil {
|
||||
handleDbGetError(c, err)
|
||||
return
|
||||
}
|
||||
|
||||
rc, err := _store.LogFind(step)
|
||||
if step.PipelineID != pl.ID {
|
||||
// make sure we can not read arbitrary logs by id
|
||||
_ = c.AbortWithError(http.StatusBadRequest, fmt.Errorf("step with id %d is not part of repo %s", stepID, repo.FullName))
|
||||
return
|
||||
}
|
||||
|
||||
logs, err := _store.LogFind(step)
|
||||
if err != nil {
|
||||
handleDbGetError(c, err)
|
||||
return
|
||||
}
|
||||
|
||||
defer rc.Close()
|
||||
|
||||
c.Header("Content-Type", "application/json")
|
||||
if _, err := io.Copy(c.Writer, rc); err != nil {
|
||||
log.Error().Err(err).Msg("could not copy log to http response")
|
||||
}
|
||||
c.JSON(http.StatusOK, logs)
|
||||
}
|
||||
|
||||
// GetPipelineConfig
|
||||
|
@ -532,7 +490,6 @@ func DeletePipelineLogs(c *gin.Context) {
|
|||
_store := store.FromContext(c)
|
||||
|
||||
repo := session.Repo(c)
|
||||
user := session.User(c)
|
||||
num, _ := strconv.ParseInt(c.Params.ByName("number"), 10, 64)
|
||||
|
||||
pl, err := _store.GetPipelineNumber(repo, num)
|
||||
|
@ -554,11 +511,8 @@ func DeletePipelineLogs(c *gin.Context) {
|
|||
}
|
||||
|
||||
for _, step := range steps {
|
||||
t := time.Now().UTC()
|
||||
buf := bytes.NewBufferString(fmt.Sprintf(deleteStr, step.Name, user.Login, t.Format(time.UnixDate)))
|
||||
lerr := _store.LogSave(step, buf)
|
||||
if lerr != nil {
|
||||
err = lerr
|
||||
if lErr := _store.LogDelete(step); err != nil {
|
||||
err = errors.Join(err, lErr)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
|
@ -568,11 +522,3 @@ func DeletePipelineLogs(c *gin.Context) {
|
|||
|
||||
c.String(http.StatusNoContent, "")
|
||||
}
|
||||
|
||||
var deleteStr = `[
|
||||
{
|
||||
"step": %q,
|
||||
"pos": 0,
|
||||
"out": "logs purged by %s on %s\n"
|
||||
}
|
||||
]`
|
||||
|
|
|
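Editor's note: the handler above now returns the stored log entries as a JSON array instead of copying a raw blob. A minimal, self-contained sketch (not part of the commit) of how a client could read them over HTTP follows; the server URL, repo path, pipeline number, step id and token are placeholders, and the struct only mirrors the JSON tags of model.LogEntry shown later in this diff.

package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// logEntry mirrors the JSON shape of model.LogEntry introduced in this commit.
type logEntry struct {
	ID     int64  `json:"id"`
	StepID int64  `json:"step_id"`
	Time   int64  `json:"time"`
	Line   int    `json:"line"`
	Data   []byte `json:"data"` // base64 in JSON; encoding/json decodes it for us
	Type   int    `json:"type"`
}

func main() {
	req, _ := http.NewRequest("GET", "https://ci.example.com/api/repos/octocat/hello-world/logs/42/7", nil)
	req.Header.Set("Authorization", "Bearer <personal access token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var entries []logEntry
	if err := json.NewDecoder(resp.Body).Decode(&entries); err != nil {
		panic(err)
	}
	for _, e := range entries {
		fmt.Printf("%d: %s", e.Line, e.Data)
	}
}
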
@@ -17,6 +17,7 @@ package api

import (
"context"
"encoding/json"
"fmt"
"io"
"net/http"
@@ -27,7 +28,6 @@ import (
"github.com/rs/zerolog/log"

"github.com/woodpecker-ci/woodpecker/server"
"github.com/woodpecker-ci/woodpecker/server/logging"
"github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/pubsub"
"github.com/woodpecker-ci/woodpecker/server/router/middleware/session"
@@ -121,6 +121,17 @@ func EventStreamSSE(c *gin.Context) {
}
}

// LogStream
//
// @Summary Log stream
// @Router /logs/{owner}/{name}/{pipeline}/{stepID} [get]
// @Produce plain
// @Success 200
// @Tags Pipeline logs
// @Param owner path string true "the repository owner's name"
// @Param name path string true "the repository name"
// @Param pipeline path int true "the number of the pipeline"
// @Param stepID path int true "the step id"
func LogStreamSSE(c *gin.Context) {
c.Header("Content-Type", "text/event-stream")
c.Header("Cache-Control", "no-cache")
@@ -138,26 +149,43 @@ func LogStreamSSE(c *gin.Context) {
logWriteStringErr(io.WriteString(rw, ": ping\n\n"))
flusher.Flush()

repo := session.Repo(c)
_store := store.FromContext(c)
repo := session.Repo(c)

// // parse the pipeline number and step sequence number from
// // the request parameter.
pipelinen, _ := strconv.ParseInt(c.Param("pipeline"), 10, 64)
stepn, _ := strconv.Atoi(c.Param("number"))

pipeline, err := _store.GetPipelineNumber(repo, pipelinen)
pipeline, err := strconv.ParseInt(c.Param("pipeline"), 10, 64)
if err != nil {
log.Debug().Err(err).Msg("pipeline number invalid")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: pipeline number invalid\n\n"))
return
}
pl, err := _store.GetPipelineNumber(repo, pipeline)
if err != nil {
log.Debug().Msgf("stream cannot get pipeline number: %v", err)
logWriteStringErr(io.WriteString(rw, "event: error\ndata: pipeline not found\n\n"))
return
}
step, err := _store.StepFind(pipeline, stepn)

stepID, err := strconv.ParseInt(c.Param("stepId"), 10, 64)
if err != nil {
log.Debug().Err(err).Msg("step id invalid")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: step id invalid\n\n"))
return
}
step, err := _store.StepLoad(stepID)
if err != nil {
log.Debug().Msgf("stream cannot get step number: %v", err)
logWriteStringErr(io.WriteString(rw, "event: error\ndata: process not found\n\n"))
return
}

if step.PipelineID != pl.ID {
// make sure we can not read arbitrary logs by id
err = fmt.Errorf("step with id %d is not part of repo %s", stepID, repo.FullName)
log.Debug().Err(err).Msg("event error")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: "+err.Error()+"\n\n"))
return
}

if step.State != model.StatusRunning {
log.Debug().Msg("stream not found.")
logWriteStringErr(io.WriteString(rw, "event: error\ndata: stream not found\n\n"))
@@ -178,18 +206,14 @@ func LogStreamSSE(c *gin.Context) {
}()

go func() {
// TODO remove global variable
err := server.Config.Services.Logs.Tail(ctx, fmt.Sprint(step.ID), func(entries ...*logging.Entry) {
defer func() {
obj := recover() // fix #2480 // TODO: check if it's still needed
log.Trace().Msgf("pubsub subscribe recover return: %v", obj)
}()
err := server.Config.Services.Logs.Tail(ctx, step.ID, func(entries ...*model.LogEntry) {
for _, entry := range entries {
select {
case <-ctx.Done():
return
default:
logc <- entry.Data
ee, _ := json.Marshal(entry)
logc <- ee
}
}
})

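Editor's note: with this change every SSE payload is a JSON-encoded log entry rather than a raw byte blob. A rough sketch (not from the commit) of tailing the stream with the Go standard library follows; it assumes each event's "data:" line carries one JSON-encoded LogEntry, and the URL is a placeholder.

package main

import (
	"bufio"
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
)

type logEntry struct {
	StepID int64  `json:"step_id"`
	Line   int    `json:"line"`
	Data   []byte `json:"data"`
}

func main() {
	resp, err := http.Get("https://ci.example.com/api/stream/logs/octocat/hello-world/42/7")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	sc := bufio.NewScanner(resp.Body)
	for sc.Scan() {
		line := sc.Text()
		if !strings.HasPrefix(line, "data:") {
			continue // skip ": ping" comments and "event:" lines
		}
		payload := strings.TrimSpace(strings.TrimPrefix(line, "data:"))
		var entry logEntry
		if err := json.Unmarshal([]byte(payload), &entry); err != nil {
			continue // error events carry plain text, not JSON
		}
		fmt.Printf("step %d line %d: %s", entry.StepID, entry.Line, entry.Data)
	}
}
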
@@ -238,40 +238,41 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {

workflow, err := s.store.StepLoad(workflowID)
if err != nil {
log.Error().Msgf("error: cannot find step with id %d: %s", workflowID, err)
log.Error().Err(err).Msgf("cannot find step with id %d", workflowID)
return err
}

currentPipeline, err := s.store.GetPipeline(workflow.PipelineID)
if err != nil {
log.Error().Msgf("error: cannot find pipeline with id %d: %s", workflow.PipelineID, err)
log.Error().Err(err).Msgf("cannot find pipeline with id %d", workflow.PipelineID)
return err
}

repo, err := s.store.GetRepo(currentPipeline.RepoID)
if err != nil {
log.Error().Msgf("error: cannot find repo with id %d: %s", currentPipeline.RepoID, err)
log.Error().Err(err).Msgf("cannot find repo with id %d", currentPipeline.RepoID)
return err
}

log.Trace().
logger := log.With().
Str("repo_id", fmt.Sprint(repo.ID)).
Str("build_id", fmt.Sprint(currentPipeline.ID)).
Str("step_id", id).
Msgf("gRPC Done with state: %#v", state)
Str("pipeline_id", fmt.Sprint(currentPipeline.ID)).
Str("workflow_id", id).Logger()

logger.Trace().Msgf("gRPC Done with state: %#v", state)

if workflow, err = pipeline.UpdateStepStatusToDone(s.store, *workflow, state); err != nil {
log.Error().Msgf("error: done: cannot update step_id %d state: %s", workflow.ID, err)
logger.Error().Err(err).Msgf("pipeline.UpdateStepStatusToDone: cannot update workflow state: %s", err)
}

var queueErr error
if workflow.Failing() {
queueErr = s.queue.Error(c, id, fmt.Errorf("Step finished with exitcode %d, %s", state.ExitCode, state.Error))
queueErr = s.queue.Error(c, id, fmt.Errorf("Step finished with exit code %d, %s", state.ExitCode, state.Error))
} else {
queueErr = s.queue.Done(c, id, workflow.State)
}
if queueErr != nil {
log.Error().Msgf("error: done: cannot ack step_id %d: %s", workflowID, err)
logger.Error().Err(queueErr).Msg("queue.Done: cannot ack workflow")
}

steps, err := s.store.StepList(currentPipeline)
@@ -282,15 +283,20 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {

if !model.IsThereRunningStage(steps) {
if currentPipeline, err = pipeline.UpdateStatusToDone(s.store, *currentPipeline, model.PipelineStatus(steps), workflow.Stopped); err != nil {
log.Error().Err(err).Msgf("error: done: cannot update build_id %d final state", currentPipeline.ID)
logger.Error().Err(err).Msgf("pipeline.UpdateStatusToDone: cannot update workflow final state")
}
}

s.updateForgeStatus(c, repo, currentPipeline, workflow)

if err := s.logger.Close(c, id); err != nil {
log.Error().Err(err).Msgf("done: cannot close build_id %d logger", workflow.ID)
}
// make sure writes to pubsub are non blocking (https://github.com/woodpecker-ci/woodpecker/blob/c919f32e0b6432a95e1a6d3d0ad662f591adf73f/server/logging/log.go#L9)
go func() {
for _, step := range steps {
if err := s.logger.Close(c, step.ID); err != nil {
logger.Error().Err(err).Msgf("done: cannot close log stream for step %d", step.ID)
}
}
}()

if err := s.notify(c, repo, currentPipeline, steps); err != nil {
return err
@@ -308,13 +314,28 @@ func (s *RPC) Done(c context.Context, id string, state rpc.State) error {
}

// Log implements the rpc.Log function
func (s *RPC) Log(c context.Context, id string, line *rpc.Line) error {
entry := new(logging.Entry)
entry.Data, _ = json.Marshal(line)
if err := s.logger.Write(c, id, entry); err != nil {
log.Error().Err(err).Msgf("rpc server could not write to logger")
func (s *RPC) Log(c context.Context, _logEntry *rpc.LogEntry) error {
// convert rpc log_entry to model.log_entry
step, err := s.store.StepByUUID(_logEntry.StepUUID)
if err != nil {
return fmt.Errorf("could not find step with uuid %s in store: %w", _logEntry.StepUUID, err)
}
return nil
logEntry := &model.LogEntry{
StepID: step.ID,
Time: _logEntry.Time,
Line: _logEntry.Line,
Data: []byte(_logEntry.Data),
Type: model.LogEntryType(_logEntry.Type),
}
// make sure writes to pubsub are non blocking (https://github.com/woodpecker-ci/woodpecker/blob/c919f32e0b6432a95e1a6d3d0ad662f591adf73f/server/logging/log.go#L9)
go func() {
// write line to listening web clients
if err := s.logger.Write(c, logEntry.StepID, logEntry); err != nil {
log.Error().Err(err).Msgf("rpc server could not write to logger")
}
}()
// make line persistent in database
return s.store.LogAppend(logEntry)
}

func (s *RPC) RegisterAgent(ctx context.Context, platform, backend, version string, capacity int32) (int64, error) {

|
|||
}
|
||||
|
||||
func (s *WoodpeckerServer) Log(c context.Context, req *proto.LogRequest) (*proto.Empty, error) {
|
||||
line := &rpc.Line{
|
||||
Out: req.GetLine().GetOut(),
|
||||
Pos: int(req.GetLine().GetPos()),
|
||||
Time: req.GetLine().GetTime(),
|
||||
Step: req.GetLine().GetStep(),
|
||||
logEntry := &rpc.LogEntry{
|
||||
Data: req.GetLogEntry().GetData(),
|
||||
Line: int(req.GetLogEntry().GetLine()),
|
||||
Time: req.GetLogEntry().GetTime(),
|
||||
StepUUID: req.GetLogEntry().GetStepUuid(),
|
||||
Type: int(req.GetLogEntry().GetType()),
|
||||
}
|
||||
res := new(proto.Empty)
|
||||
err := s.peer.Log(c, req.GetId(), line)
|
||||
err := s.peer.Log(c, logEntry)
|
||||
return res, err
|
||||
}
|
||||
|
||||
|
|
|
@@ -2,8 +2,9 @@ package logging

import (
"context"
"io"
"sync"

"github.com/woodpecker-ci/woodpecker/server/model"
)

// TODO (bradrydzewski) writing to subscribers is currently a blocking
@@ -27,58 +28,58 @@ type subscriber struct {
type stream struct {
sync.Mutex

path string
list []*Entry
subs map[*subscriber]struct{}
done chan struct{}
stepID int64
list []*model.LogEntry
subs map[*subscriber]struct{}
done chan struct{}
}

type log struct {
sync.Mutex

streams map[string]*stream
streams map[int64]*stream
}

// New returns a new logger.
func New() Log {
return &log{
streams: map[string]*stream{},
streams: map[int64]*stream{},
}
}

func (l *log) Open(_ context.Context, path string) error {
func (l *log) Open(_ context.Context, stepID int64) error {
l.Lock()
_, ok := l.streams[path]
_, ok := l.streams[stepID]
if !ok {
l.streams[path] = &stream{
path: path,
subs: make(map[*subscriber]struct{}),
done: make(chan struct{}),
l.streams[stepID] = &stream{
stepID: stepID,
subs: make(map[*subscriber]struct{}),
done: make(chan struct{}),
}
}
l.Unlock()
return nil
}

func (l *log) Write(_ context.Context, path string, entry *Entry) error {
func (l *log) Write(_ context.Context, stepID int64, logEntry *model.LogEntry) error {
l.Lock()
s, ok := l.streams[path]
s, ok := l.streams[stepID]
l.Unlock()
if !ok {
return ErrNotFound
}
s.Lock()
s.list = append(s.list, entry)
s.list = append(s.list, logEntry)
for sub := range s.subs {
go sub.handler(entry)
go sub.handler(logEntry)
}
s.Unlock()
return nil
}

func (l *log) Tail(c context.Context, path string, handler Handler) error {
func (l *log) Tail(c context.Context, stepID int64, handler Handler) error {
l.Lock()
s, ok := l.streams[path]
s, ok := l.streams[stepID]
l.Unlock()
if !ok {
return ErrNotFound
@@ -105,9 +106,9 @@ func (l *log) Tail(c context.Context, path string, handler Handler) error {
return nil
}

func (l *log) Close(_ context.Context, path string) error {
func (l *log) Close(_ context.Context, stepID int64) error {
l.Lock()
s, ok := l.streams[path]
s, ok := l.streams[stepID]
l.Unlock()
if !ok {
return ErrNotFound
@@ -118,29 +119,7 @@ func (l *log) Close(_ context.Context, path string) error {
s.Unlock()

l.Lock()
delete(l.streams, path)
delete(l.streams, stepID)
l.Unlock()
return nil
}

func (l *log) Snapshot(_ context.Context, path string, w io.Writer) error {
l.Lock()
s, ok := l.streams[path]
l.Unlock()
if !ok {
return ErrNotFound
}
s.Lock()
defer s.Unlock()
for _, entry := range s.list {
if _, err := w.Write(entry.Data); err != nil {
return err
}
if _, err := w.Write(cr); err != nil {
return err
}
}
return nil
}

var cr = []byte{'\n'}

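Editor's note: a short usage sketch of the reworked, step-ID-keyed multiplexer, assuming the server/logging and server/model packages as changed in this commit. One stream is opened per step, web clients attach with Tail, the gRPC side pushes entries with Write, and Close releases all tails when the step finishes.

package main

import (
	"context"
	"fmt"

	"github.com/woodpecker-ci/woodpecker/server/logging"
	"github.com/woodpecker-ci/woodpecker/server/model"
)

func main() {
	ctx := context.Background()
	logs := logging.New()

	stepID := int64(7)
	_ = logs.Open(ctx, stepID) // one stream per step

	// a web client tailing the step; entries arrive through the handler callback
	go func() {
		_ = logs.Tail(ctx, stepID, func(entries ...*model.LogEntry) {
			for _, e := range entries {
				fmt.Printf("%d: %s", e.Line, e.Data)
			}
		})
	}()

	// the gRPC side writing lines as the agent streams them
	_ = logs.Write(ctx, stepID, &model.LogEntry{StepID: stepID, Line: 1, Data: []byte("hello\n")})

	// closing the stream releases all tails once the step is done
	_ = logs.Close(ctx, stepID)
}
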
@@ -7,14 +7,15 @@ import (
"time"

"github.com/stretchr/testify/assert"
"github.com/woodpecker-ci/woodpecker/server/model"
)

func TestLogging(t *testing.T) {
var (
wg sync.WaitGroup

testPath = "test"
testEntry = &Entry{
testStepID = int64(123)
testEntry = &model.LogEntry{
Data: []byte("test"),
}
)
@@ -24,27 +25,27 @@ func TestLogging(t *testing.T) {
)

logger := New()
assert.NoError(t, logger.Open(ctx, testPath))
assert.NoError(t, logger.Open(ctx, testStepID))
go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() }))
assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}()
go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() }))
assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}()

<-time.After(500 * time.Millisecond)

wg.Add(4)
go func() {
assert.NoError(t, logger.Write(ctx, testPath, testEntry))
assert.NoError(t, logger.Write(ctx, testPath, testEntry))
assert.NoError(t, logger.Write(ctx, testStepID, testEntry))
assert.NoError(t, logger.Write(ctx, testStepID, testEntry))
}()

wg.Wait()

wg.Add(1)
go func() {
assert.NoError(t, logger.Tail(ctx, testPath, func(entry ...*Entry) { wg.Done() }))
assert.NoError(t, logger.Tail(ctx, testStepID, func(entry ...*model.LogEntry) { wg.Done() }))
}()

<-time.After(500 * time.Millisecond)

@@ -3,78 +3,27 @@ package logging
import (
"context"
"errors"
"io"

"github.com/woodpecker-ci/woodpecker/server/model"
)

// ErrNotFound is returned when the log does not exist.
var ErrNotFound = errors.New("stream: not found")

// Entry defines a log entry.
type Entry struct {
// ID identifies this message.
ID string `json:"id,omitempty"`

// Data is the actual data in the entry.
Data []byte `json:"data"`

// Tags represents the key-value pairs the
// entry is tagged with.
Tags map[string]string `json:"tags,omitempty"`
}

// Handler defines a callback function for handling log entries.
type Handler func(...*Entry)
type Handler func(...*model.LogEntry)

// Log defines a log multiplexer.
type Log interface {
// Open opens the log.
Open(c context.Context, path string) error
Open(c context.Context, stepID int64) error

// Write writes the entry to the log.
Write(c context.Context, path string, entry *Entry) error
Write(c context.Context, stepID int64, entry *model.LogEntry) error

// Tail tails the log.
Tail(c context.Context, path string, handler Handler) error
Tail(c context.Context, stepID int64, handler Handler) error

// Close closes the log.
Close(c context.Context, path string) error

// Snapshot snapshots the stream to Writer w.
Snapshot(c context.Context, path string, w io.Writer) error

// Info returns runtime information about the multiplexer.
// Info(c context.Context) (interface{}, error)
Close(c context.Context, stepID int64) error
}

// // global streamer
// var global = New()
//
// // Set sets a default global logger.
// func Set(log Log) {
// global = log
// }
//
// // Open opens the log stream.
// func Open(c context.Context, path string) error {
// return global.Open(c, path)
// }
//
// // Write writes the log entry to the stream.
// func Write(c context.Context, path string, entry *Entry) error {
// return global.Write(c, path, entry)
// }
//
// // Tail tails the log stream.
// func Tail(c context.Context, path string, handler Handler) error {
// return global.Tail(c, path, handler)
// }
//
// // Close closes the log stream.
// func Close(c context.Context, path string) error {
// return global.Close(c, path)
// }
//
// // Snapshot snapshots the stream to Writer w.
// func Snapshot(c context.Context, path string, w io.Writer) error {
// return global.Snapshot(c, path, w)
// }

@@ -14,9 +14,29 @@

package model

type Logs struct {
ID int64 `xorm:"pk autoincr 'log_id'"`
StepID int64 `xorm:"UNIQUE 'log_step_id'"`
Data []byte `xorm:"LONGBLOB 'log_data'"`
// TODO: add create timestamp
// LogEntryType identifies the type of line in the logs.
type LogEntryType int // @name LogEntryType

const (
LogEntryStdout LogEntryType = iota
LogEntryStderr
LogEntryExitCode
LogEntryMetadata
LogEntryProgress
)

type LogEntry struct {
ID int64 `json:"id" xorm:"pk autoincr 'id'"`
StepID int64 `json:"step_id" xorm:"'step_id'"`
Time int64 `json:"time"`
Line int `json:"line"`
Data []byte `json:"data" xorm:"LONGBLOB"`
Created int64 `json:"-" xorm:"created"`
Type LogEntryType `json:"type"`
} // @name LogEntry

// TODO: store info what specific command the line belongs to (must be optional and impl. by backend)

func (LogEntry) TableName() string {
return "log_entries"
}

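Editor's note: because Data is a []byte, encoding/json serializes it as base64, which is why the web UI later in this diff decodes entries with atob(). A tiny standalone demonstration (the struct below is a copy for illustration, not the model package itself):

package main

import (
	"encoding/json"
	"fmt"
)

type LogEntry struct {
	StepID int64  `json:"step_id"`
	Time   int64  `json:"time"`
	Line   int    `json:"line"`
	Data   []byte `json:"data"`
	Type   int    `json:"type"`
}

func main() {
	b, _ := json.Marshal(LogEntry{StepID: 7, Line: 1, Data: []byte("+ go test ./...\n")})
	fmt.Println(string(b))
	// prints: {"step_id":7,"time":0,"line":1,"data":"KyBnbyB0ZXN0IC4vLi4uCg==","type":0}
}
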
@@ -31,6 +31,7 @@ type StepStore interface {
// Step represents a process in the pipeline.
type Step struct {
ID int64 `json:"id" xorm:"pk autoincr 'step_id'"`
UUID string `json:"uuid" xorm:"UNIQUE INDEX 'step_uuid'"`
PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"`
PID int `json:"pid" xorm:"UNIQUE(s) 'step_pid'"`
PPID int `json:"ppid" xorm:"step_ppid"`

@@ -23,6 +23,7 @@ import (
func TestTree(t *testing.T) {
steps := []*Step{{
ID: 25,
UUID: "f80df0bb-77a7-4964-9412-2e1049872d57",
PID: 2,
PipelineID: 6,
PPID: 1,
@@ -32,6 +33,7 @@ func TestTree(t *testing.T) {
Error: "0",
}, {
ID: 24,
UUID: "c19b49c5-990d-4722-ba9c-1c4fe9db1f91",
PipelineID: 6,
PID: 1,
PPID: 0,
@@ -41,6 +43,7 @@ func TestTree(t *testing.T) {
Error: "1",
}, {
ID: 26,
UUID: "4380146f-c0ff-4482-8107-c90937d1faba",
PipelineID: 6,
PID: 3,
PPID: 1,
@@ -56,6 +59,7 @@ func TestTree(t *testing.T) {

steps = []*Step{{
ID: 25,
UUID: "f80df0bb-77a7-4964-9412-2e1049872d57",
PID: 2,
PipelineID: 6,
PPID: 1,

@@ -131,18 +131,18 @@ func cancelPreviousPipelines(
return err
}

pipelineNeedsCancel := func(active *model.Pipeline) (bool, error) {
pipelineNeedsCancel := func(active *model.Pipeline) bool {
// always filter on same event
if active.Event != pipeline.Event {
return false, nil
return false
}

// find events for the same context
switch pipeline.Event {
case model.EventPush:
return pipeline.Branch == active.Branch, nil
return pipeline.Branch == active.Branch
default:
return pipeline.Refspec == active.Refspec, nil
return pipeline.Refspec == active.Refspec
}
}

@@ -152,14 +152,7 @@ func cancelPreviousPipelines(
continue
}

cancel, err := pipelineNeedsCancel(active)
if err != nil {
log.Error().
Err(err).
Str("Ref", active.Ref).
Msg("Error while trying to cancel pipeline, skipping")
continue
}
cancel := pipelineNeedsCancel(active)

if !cancel {
continue

@@ -51,7 +51,7 @@ func zeroSteps(currentPipeline *model.Pipeline, forgeYamlConfigs []*forge_types.
return false
}

// TODO: parse yaml once and not for each filter function
// TODO: parse yaml once and not for each filter function (-> move server/pipeline/filter* into pipeline/step_builder)
// Check if at least one pipeline step will be execute otherwise we will just ignore this webhook
func checkIfFiltered(repo *model.Repo, p *model.Pipeline, forgeYamlConfigs []*forge_types.FileMeta) (bool, error) {
log.Trace().Msgf("hook.branchFiltered(): pipeline branch: '%s' pipeline event: '%s' config count: %d", p.Branch, p.Event, len(forgeYamlConfigs))

@@ -49,9 +49,6 @@ func queuePipeline(repo *model.Repo, pipelineItems []*pipeline.Item) error {
Timeout: repo.Timeout,
})

if err := server.Config.Services.Logs.Open(context.Background(), task.ID); err != nil {
return err
}
tasks = append(tasks, task)
}
return server.Config.Services.Queue.PushAtOnce(context.Background(), tasks)

@@ -20,6 +20,7 @@ import (
"github.com/rs/zerolog/log"

"github.com/woodpecker-ci/woodpecker/pipeline"
"github.com/woodpecker-ci/woodpecker/server"
"github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/store"
)
@@ -46,6 +47,16 @@ func start(ctx context.Context, store store.Store, activePipeline *model.Pipelin
return nil, err
}

// open logs streamer for each step
go func() {
steps := activePipeline.Steps
for _, step := range steps {
if err := server.Config.Services.Logs.Open(context.Background(), step.ID); err != nil {
log.Error().Err(err).Msgf("could not open log stream for step %d", step.ID)
}
}
}()

updatePipelineStatus(ctx, activePipeline, repo, user)

return activePipeline, nil

@@ -89,8 +89,7 @@ func apiRoutes(e *gin.Engine) {
repo.POST("/pipelines/:number/approve", session.MustPush, api.PostApproval)
repo.POST("/pipelines/:number/decline", session.MustPush, api.PostDecline)

repo.GET("/logs/:number/:pid", api.GetStepLogs)
repo.GET("/logs/:number/:pid/:step", api.GetPipelineLogs)
repo.GET("/logs/:number/:stepId", api.GetStepLogs)

// requires push permissions
repo.DELETE("/logs/:number", session.MustPush, api.DeletePipelineLogs)
@@ -179,6 +178,15 @@ func apiRoutes(e *gin.Engine) {

apiBase.POST("/hook", api.PostHook)

stream := apiBase.Group("/stream")
{
stream.GET("/logs/:owner/:name/:pipeline/:stepId",
session.SetRepo(),
session.SetPerm(),
session.MustPull,
api.LogStreamSSE)
}

if zerolog.GlobalLevel() <= zerolog.DebugLevel {
debugger := apiBase.Group("/debug")
{
@@ -204,11 +212,5 @@ func apiRoutes(e *gin.Engine) {
sse := e.Group("/stream")
{
sse.GET("/events", api.EventStreamSSE)
sse.GET("/logs/:owner/:name/:pipeline/:number",
session.SetRepo(),
session.SetPerm(),
session.MustPull,
api.LogStreamSSE,
)
}
}

@@ -15,50 +15,41 @@
package datastore

import (
"bytes"
"io"
"fmt"

"github.com/woodpecker-ci/woodpecker/server/model"
)

func (s storage) LogFind(step *model.Step) (io.ReadCloser, error) {
logs := &model.Logs{
StepID: step.ID,
}
if err := wrapGet(s.engine.Get(logs)); err != nil {
return nil, err
}
buf := bytes.NewBuffer(logs.Data)
return io.NopCloser(buf), nil
func (s storage) LogFind(step *model.Step) ([]*model.LogEntry, error) {
var logEntries []*model.LogEntry
return logEntries, s.engine.Asc("id").Where("step_id = ?", step.ID).Find(&logEntries)
}

func (s storage) LogSave(step *model.Step, reader io.Reader) error {
data, _ := io.ReadAll(reader)

func (s storage) LogSave(step *model.Step, logEntries []*model.LogEntry) error {
sess := s.engine.NewSession()
defer sess.Close()
if err := sess.Begin(); err != nil {
return err
}

logs := new(model.Logs)
exist, err := sess.Where("log_step_id = ?", step.ID).Get(logs)
if err != nil {
return err
}

if exist {
if _, err := sess.ID(logs.ID).Cols("log_data").Update(&model.Logs{Data: data}); err != nil {
return err
for _, logEntry := range logEntries {
if logEntry.StepID != step.ID {
return fmt.Errorf("got a log-entry with step id '%d' but expected '%d'", logEntry.StepID, step.ID)
}
} else {
if _, err := sess.Insert(&model.Logs{
StepID: step.ID,
Data: data,
}); err != nil {
if _, err := sess.Insert(logEntry); err != nil {
return err
}
}

return sess.Commit()
}

func (s storage) LogAppend(logEntry *model.LogEntry) error {
_, err := s.engine.Insert(logEntry)
return err
}

func (s storage) LogDelete(step *model.Step) error {
_, err := s.engine.Where("step_id = ?", step.ID).Delete(new(model.LogEntry))
return err
}

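Editor's note: a short usage sketch of how the new storage methods fit together, assuming the store.Store interface as changed in this commit; the package name and helper below are hypothetical. LogAppend persists single lines while a step is running, LogFind returns everything persisted for a step ordered by insertion id, and LogDelete purges it.

package logutil

import (
	"fmt"

	"github.com/woodpecker-ci/woodpecker/server/model"
	"github.com/woodpecker-ci/woodpecker/server/store"
)

// purgeStepLogs drops the persisted entries of a step and leaves one marker line.
func purgeStepLogs(s store.Store, step *model.Step, who string) error {
	entries, err := s.LogFind(step) // ordered by insertion id
	if err != nil {
		return err
	}
	fmt.Printf("purging %d log entries of step %d\n", len(entries), step.ID)

	if err := s.LogDelete(step); err != nil {
		return err
	}
	// LogAppend persists a single line; the agent-facing RPC uses it for every streamed line
	return s.LogAppend(&model.LogEntry{
		StepID: step.ID,
		Data:   []byte(fmt.Sprintf("logs purged by %s\n", who)),
	})
}
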
@@ -15,64 +15,84 @@
package datastore

import (
"bytes"
"io"
"testing"

"github.com/stretchr/testify/assert"
"github.com/woodpecker-ci/woodpecker/server/model"
)

func TestLogCreateFind(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Logs))
func TestLogCreateFindDelete(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.LogEntry))
defer closer()

step := model.Step{
ID: 1,
}
buf := bytes.NewBufferString("echo hi")
err := store.LogSave(&step, buf)
if err != nil {
t.Errorf("Unexpected error: log create: %s", err)

logEntries := []*model.LogEntry{
{
StepID: step.ID,
Data: []byte("hello"),
Line: 1,
Time: 0,
},
{
StepID: step.ID,
Data: []byte("world"),
Line: 2,
Time: 10,
},
}

rc, err := store.LogFind(&step)
if err != nil {
t.Errorf("Unexpected error: log create: %s", err)
}
// first insert should just work
assert.NoError(t, store.LogSave(&step, logEntries))

defer rc.Close()
out, _ := io.ReadAll(rc)
if got, want := string(out), "echo hi"; got != want {
t.Errorf("Want log data %s, got %s", want, got)
}
// we want to find our inserted logs
_logEntries, err := store.LogFind(&step)
assert.NoError(t, err)
assert.Len(t, _logEntries, len(logEntries))

// delete and check
assert.NoError(t, store.LogDelete(&step))
_logEntries, err = store.LogFind(&step)
assert.NoError(t, err)
assert.Len(t, _logEntries, 0)
}

func TestLogUpdate(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Logs))
func TestLogAppend(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.LogEntry))
defer closer()

step := model.Step{
ID: 1,
}
buf1 := bytes.NewBufferString("echo hi")
buf2 := bytes.NewBufferString("echo allo?")
err1 := store.LogSave(&step, buf1)
err2 := store.LogSave(&step, buf2)
if err1 != nil {
t.Errorf("Unexpected error: log create: %s", err1)
}
if err2 != nil {
t.Errorf("Unexpected error: log update: %s", err2)
logEntries := []*model.LogEntry{
{
StepID: step.ID,
Data: []byte("hello"),
Line: 1,
Time: 0,
},
{
StepID: step.ID,
Data: []byte("world"),
Line: 2,
Time: 10,
},
}

rc, err := store.LogFind(&step)
if err != nil {
t.Errorf("Unexpected error: log create: %s", err)
assert.NoError(t, store.LogSave(&step, logEntries))

logEntry := &model.LogEntry{
StepID: step.ID,
Data: []byte("allo?"),
Line: 3,
Time: 20,
}

defer rc.Close()
out, _ := io.ReadAll(rc)
if got, want := string(out), "echo allo?"; got != want {
t.Errorf("Want log data %s, got %s", want, got)
}
assert.NoError(t, store.LogAppend(logEntry))

_logEntries, err := store.LogFind(&step)
assert.NoError(t, err)
assert.Len(t, _logEntries, len(logEntries)+1)
}

@@ -54,7 +54,7 @@ var allBeans = []interface{}{
new(model.Pipeline),
new(model.PipelineConfig),
new(model.Config),
new(model.Logs),
new(model.LogEntry),
new(model.Perm),
new(model.Step),
new(model.Registry),

@@ -299,7 +299,7 @@ func TestRepoCrud(t *testing.T) {
new(model.Perm),
new(model.Pipeline),
new(model.PipelineConfig),
new(model.Logs),
new(model.LogEntry),
new(model.Step),
new(model.Secret),
new(model.Registry),
@@ -334,6 +334,7 @@ func TestRepoCrud(t *testing.T) {
RepoID: repoUnrelated.ID,
}
stepUnrelated := model.Step{
UUID: "44c0de71-a6be-41c9-b860-e3716d1dfcef",
Name: "a unrelated step",
}
assert.NoError(t, store.CreatePipeline(&pipelineUnrelated, &stepUnrelated))

@@ -33,6 +33,11 @@ func (s storage) StepFind(pipeline *model.Pipeline, pid int) (*model.Step, error
return step, wrapGet(s.engine.Get(step))
}

func (s storage) StepByUUID(uuid string) (*model.Step, error) {
step := new(model.Step)
return step, wrapGet(s.engine.Where("step_uuid = ?", uuid).Get(step))
}

func (s storage) StepChild(pipeline *model.Pipeline, ppid int, child string) (*model.Step, error) {
step := &model.Step{
PipelineID: pipeline.ID,
@@ -87,7 +92,7 @@ func (s storage) StepClear(pipeline *model.Pipeline) error {
}

func deleteStep(sess *xorm.Session, stepID int64) error {
if _, err := sess.Where("log_step_id = ?", stepID).Delete(new(model.Logs)); err != nil {
if _, err := sess.Where("step_id = ?", stepID).Delete(new(model.LogEntry)); err != nil {
return err
}
_, err := sess.ID(stepID).Delete(new(model.Step))

@@ -21,6 +21,7 @@ import (
"github.com/stretchr/testify/assert"

"github.com/woodpecker-ci/woodpecker/server/model"
"github.com/woodpecker-ci/woodpecker/server/store/types"
)

func TestStepFind(t *testing.T) {
@@ -29,6 +30,7 @@ func TestStepFind(t *testing.T) {

steps := []*model.Step{
{
UUID: "8d89104f-d44e-4b45-b86e-17f8b5e74a0e",
PipelineID: 1000,
PID: 1,
PPID: 2,
@@ -59,6 +61,7 @@ func TestStepChild(t *testing.T) {

err := store.StepCreate([]*model.Step{
{
UUID: "ea6d4008-8ace-4f8a-ad03-53f1756465d9",
PipelineID: 1,
PID: 1,
PPID: 1,
@@ -66,6 +69,7 @@ func TestStepChild(t *testing.T) {
State: "success",
},
{
UUID: "2bf387f7-2913-4907-814c-c9ada88707c0",
PipelineID: 1,
PID: 2,
PGID: 2,
@@ -98,6 +102,7 @@ func TestStepList(t *testing.T) {

err := store.StepCreate([]*model.Step{
{
UUID: "2bf387f7-2913-4907-814c-c9ada88707c0",
PipelineID: 2,
PID: 1,
PPID: 1,
@@ -105,6 +110,7 @@ func TestStepList(t *testing.T) {
State: "success",
},
{
UUID: "4b04073c-1827-4aa4-a5f5-c7b21c5e44a6",
PipelineID: 1,
PID: 1,
PPID: 1,
@@ -112,6 +118,7 @@ func TestStepList(t *testing.T) {
State: "success",
},
{
UUID: "40aab045-970b-4892-b6df-6f825a7ec97a",
PipelineID: 1,
PID: 2,
PGID: 2,
@@ -139,6 +146,7 @@ func TestStepUpdate(t *testing.T) {
defer closer()

step := &model.Step{
UUID: "fc7c7fd6-553e-480b-8ed7-30d8563d0b79",
PipelineID: 1,
PID: 1,
PPID: 2,
@@ -176,6 +184,7 @@ func TestStepIndexes(t *testing.T) {

if err := store.StepCreate([]*model.Step{
{
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 1,
PID: 1,
PPID: 1,
@@ -191,6 +200,7 @@ func TestStepIndexes(t *testing.T) {
// fail due to duplicate pid
if err := store.StepCreate([]*model.Step{
{
UUID: "c1f33a9e-2a02-4579-95ec-90255d785a12",
PipelineID: 1,
PID: 1,
PPID: 1,
@@ -201,6 +211,60 @@ func TestStepIndexes(t *testing.T) {
}); err == nil {
t.Errorf("Unexpected error: duplicate pid")
}

// fail due to duplicate uuid
if err := store.StepCreate([]*model.Step{
{
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 5,
PID: 4,
PPID: 3,
PGID: 2,
State: "success",
Name: "clone",
},
}); err == nil {
t.Errorf("Unexpected error: duplicate pid")
}
}

func TestStepByUUID(t *testing.T) {
store, closer := newTestStore(t, new(model.Step), new(model.Pipeline))
defer closer()

assert.NoError(t, store.StepCreate([]*model.Step{
{
UUID: "4db7e5fc-5312-4d02-9e14-b51b9e3242cc",
PipelineID: 1,
PID: 1,
PPID: 1,
PGID: 1,
State: "running",
Name: "build",
},
{
UUID: "fc7c7fd6-553e-480b-8ed7-30d8563d0b79",
PipelineID: 4,
PID: 6,
PPID: 7,
PGID: 8,
Name: "build",
State: "pending",
Error: "pc load letter",
ExitCode: 255,
AgentID: 1,
Platform: "linux/amd64",
Environ: map[string]string{"GOLANG": "tip"},
},
}))

step, err := store.StepByUUID("4db7e5fc-5312-4d02-9e14-b51b9e3242cc")
assert.NoError(t, err)
assert.NotEmpty(t, step)

step, err = store.StepByUUID("52feb6f5-8ce2-40c0-9937-9d0e3349c98c")
assert.ErrorIs(t, err, types.RecordNotExist)
assert.Empty(t, step)
}

// TODO: func TestStepCascade(t *testing.T) {}

@@ -3,8 +3,6 @@
package mocks

import (
io "io"

mock "github.com/stretchr/testify/mock"
model "github.com/woodpecker-ci/woodpecker/server/model"
)
@@ -1089,20 +1087,48 @@ func (_m *Store) HasRedirectionForRepo(_a0 int64, _a1 string) (bool, error) {
return r0, r1
}

// LogFind provides a mock function with given fields: _a0
func (_m *Store) LogFind(_a0 *model.Step) (io.ReadCloser, error) {
// LogAppend provides a mock function with given fields: logEntry
func (_m *Store) LogAppend(logEntry *model.LogEntry) error {
ret := _m.Called(logEntry)

var r0 error
if rf, ok := ret.Get(0).(func(*model.LogEntry) error); ok {
r0 = rf(logEntry)
} else {
r0 = ret.Error(0)
}

return r0
}

// LogDelete provides a mock function with given fields: _a0
func (_m *Store) LogDelete(_a0 *model.Step) error {
ret := _m.Called(_a0)

var r0 io.ReadCloser
var r0 error
if rf, ok := ret.Get(0).(func(*model.Step) error); ok {
r0 = rf(_a0)
} else {
r0 = ret.Error(0)
}

return r0
}

// LogFind provides a mock function with given fields: _a0
func (_m *Store) LogFind(_a0 *model.Step) ([]*model.LogEntry, error) {
ret := _m.Called(_a0)

var r0 []*model.LogEntry
var r1 error
if rf, ok := ret.Get(0).(func(*model.Step) (io.ReadCloser, error)); ok {
if rf, ok := ret.Get(0).(func(*model.Step) ([]*model.LogEntry, error)); ok {
return rf(_a0)
}
if rf, ok := ret.Get(0).(func(*model.Step) io.ReadCloser); ok {
if rf, ok := ret.Get(0).(func(*model.Step) []*model.LogEntry); ok {
r0 = rf(_a0)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(io.ReadCloser)
r0 = ret.Get(0).([]*model.LogEntry)
}
}

@@ -1116,11 +1142,11 @@ func (_m *Store) LogFind(_a0 *model.Step) (io.ReadCloser, error) {
}

// LogSave provides a mock function with given fields: _a0, _a1
func (_m *Store) LogSave(_a0 *model.Step, _a1 io.Reader) error {
func (_m *Store) LogSave(_a0 *model.Step, _a1 []*model.LogEntry) error {
ret := _m.Called(_a0, _a1)

var r0 error
if rf, ok := ret.Get(0).(func(*model.Step, io.Reader) error); ok {
if rf, ok := ret.Get(0).(func(*model.Step, []*model.LogEntry) error); ok {
r0 = rf(_a0, _a1)
} else {
r0 = ret.Error(0)
@@ -1609,6 +1635,32 @@ func (_m *Store) ServerConfigSet(_a0 string, _a1 string) error {
return r0
}

// StepByUUID provides a mock function with given fields: _a0
func (_m *Store) StepByUUID(_a0 string) (*model.Step, error) {
ret := _m.Called(_a0)

var r0 *model.Step
var r1 error
if rf, ok := ret.Get(0).(func(string) (*model.Step, error)); ok {
return rf(_a0)
}
if rf, ok := ret.Get(0).(func(string) *model.Step); ok {
r0 = rf(_a0)
} else {
if ret.Get(0) != nil {
r0 = ret.Get(0).(*model.Step)
}
}

if rf, ok := ret.Get(1).(func(string) error); ok {
r1 = rf(_a0)
} else {
r1 = ret.Error(1)
}

return r0, r1
}

// StepChild provides a mock function with given fields: _a0, _a1, _a2
func (_m *Store) StepChild(_a0 *model.Pipeline, _a1 int, _a2 string) (*model.Step, error) {
ret := _m.Called(_a0, _a1, _a2)

@@ -18,8 +18,6 @@ package store
//go:generate mockery --name Store --output mocks --case underscore

import (
"io"

"github.com/woodpecker-ci/woodpecker/server/model"
)

@@ -138,6 +136,7 @@ type Store interface {
// Steps
StepLoad(int64) (*model.Step, error)
StepFind(*model.Pipeline, int) (*model.Step, error)
StepByUUID(string) (*model.Step, error)
StepChild(*model.Pipeline, int, string) (*model.Step, error)
StepList(*model.Pipeline) ([]*model.Step, error)
StepCreate([]*model.Step) error
@@ -145,10 +144,10 @@ type Store interface {
StepClear(*model.Pipeline) error

// Logs
LogFind(*model.Step) (io.ReadCloser, error)
// TODO: since we do ReadAll in any case a ioReader is not the best idea
// so either find a way to write log in chunks by xorm ...
LogSave(*model.Step, io.Reader) error
LogFind(*model.Step) ([]*model.LogEntry, error)
LogSave(*model.Step, []*model.LogEntry) error
LogAppend(logEntry *model.LogEntry) error
LogDelete(*model.Step) error

// Tasks
// TaskList TODO: paginate & opt filter

@ -64,13 +64,13 @@
|
|||
</div>
|
||||
</template>
|
||||
|
||||
<script lang="ts">
|
||||
<script lang="ts" setup>
|
||||
import '~/style/console.css';
|
||||
|
||||
import { useStorage } from '@vueuse/core';
|
||||
import AnsiUp from 'ansi_up';
|
||||
import { debounce } from 'lodash';
|
||||
import { computed, defineComponent, inject, nextTick, onMounted, PropType, Ref, ref, toRef, watch } from 'vue';
|
||||
import { computed, inject, nextTick, onMounted, Ref, ref, toRef, watch } from 'vue';
|
||||
import { useI18n } from 'vue-i18n';
|
||||
|
||||
import Button from '~/components/atomic/Button.vue';
|
||||
|
@ -86,227 +86,191 @@ type LogLine = {
|
|||
time?: number;
|
||||
};
|
||||
|
||||
export default defineComponent({
|
||||
name: 'PipelineLog',
|
||||
const props = defineProps<{
|
||||
pipeline: Pipeline;
|
||||
stepId: number;
|
||||
}>();
|
||||
|
||||
components: { Icon, Button },
|
||||
defineEmits<{
|
||||
(event: 'update:step-id', stepId: number | null): true;
|
||||
}>();
|
||||
|
||||
props: {
|
||||
pipeline: {
|
||||
type: Object as PropType<Pipeline>,
|
||||
required: true,
|
||||
},
|
||||
const notifications = useNotifications();
|
||||
const i18n = useI18n();
|
||||
const pipeline = toRef(props, 'pipeline');
|
||||
const stepId = toRef(props, 'stepId');
|
||||
const repo = inject<Ref<Repo>>('repo');
|
||||
const apiClient = useApiClient();
|
||||
|
||||
stepId: {
|
||||
type: Number,
|
||||
required: true,
|
||||
},
|
||||
},
|
||||
const loadedStepSlug = ref<string>();
|
||||
const stepSlug = computed(() => `${repo?.value.owner} - ${repo?.value.name} - ${pipeline.value.id} - ${stepId.value}`);
|
||||
const step = computed(() => pipeline.value && findStep(pipeline.value.steps || [], stepId.value));
|
||||
const stream = ref<EventSource>();
|
||||
const log = ref<LogLine[]>();
|
||||
const consoleElement = ref<Element>();
|
||||
|
||||
emits: {
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
'update:step-id': (stepId: number | null) => true,
|
||||
},
|
||||
const loadedLogs = computed(() => !!log.value);
|
||||
const hasLogs = computed(
|
||||
() =>
|
||||
// we do not have logs for skipped steps
|
||||
repo?.value && pipeline.value && step.value && step.value.state !== 'skipped' && step.value.state !== 'killed',
|
||||
);
|
||||
const autoScroll = useStorage('log-auto-scroll', false);
|
||||
const showActions = ref(false);
|
||||
const downloadInProgress = ref(false);
|
||||
const ansiUp = ref(new AnsiUp());
|
||||
ansiUp.value.use_classes = true;
|
||||
const logBuffer = ref<LogLine[]>([]);
|
||||
|
||||
setup(props) {
|
||||
const notifications = useNotifications();
|
||||
const i18n = useI18n();
|
||||
const pipeline = toRef(props, 'pipeline');
|
||||
const stepId = toRef(props, 'stepId');
|
||||
const repo = inject<Ref<Repo>>('repo');
|
||||
const apiClient = useApiClient();
|
||||
const maxLineCount = 500; // TODO: think about way to support lazy-loading more than last 300 logs (#776)
|
||||
|
||||
const loadedStepSlug = ref<string>();
|
||||
const stepSlug = computed(
|
||||
() => `${repo?.value.owner} - ${repo?.value.name} - ${pipeline.value.id} - ${stepId.value}`,
|
||||
function formatTime(time?: number): string {
|
||||
return time === undefined ? '' : `${time}s`;
|
||||
}
|
||||
|
||||
function writeLog(line: LogLine) {
|
||||
logBuffer.value.push({
|
||||
index: line.index ?? 0,
|
||||
text: ansiUp.value.ansi_to_html(line.text),
|
||||
time: line.time ?? 0,
|
||||
});
|
||||
}
|
||||
|
||||
function scrollDown() {
|
||||
nextTick(() => {
|
||||
if (!consoleElement.value) {
|
||||
return;
|
||||
}
|
||||
consoleElement.value.scrollTop = consoleElement.value.scrollHeight;
|
||||
});
|
||||
}
|
||||
|
||||
const flushLogs = debounce((scroll: boolean) => {
|
||||
let buffer = logBuffer.value.slice(-maxLineCount);
|
||||
logBuffer.value = [];
|
||||
|
||||
if (buffer.length === 0) {
|
||||
if (!log.value) {
|
||||
log.value = [];
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// append old logs lines
|
||||
if (buffer.length < maxLineCount && log.value) {
|
||||
buffer = [...log.value.slice(-(maxLineCount - buffer.length)), ...buffer];
|
||||
}
|
||||
|
||||
// deduplicate repeating times
|
||||
buffer = buffer.reduce(
|
||||
(acc, line) => ({
|
||||
lastTime: line.time ?? 0,
|
||||
lines: [
|
||||
...acc.lines,
|
||||
{
|
||||
...line,
|
||||
time: acc.lastTime === line.time ? undefined : line.time,
|
||||
},
|
||||
],
|
||||
}),
|
||||
{ lastTime: -1, lines: [] as LogLine[] },
|
||||
).lines;
|
||||
|
||||
log.value = buffer;
|
||||
|
||||
if (scroll && autoScroll.value) {
|
||||
scrollDown();
|
||||
}
|
||||
}, 500);
|
||||
|
||||
async function download() {
|
||||
if (!repo?.value || !pipeline.value || !step.value) {
|
||||
throw new Error('The repository, pipeline or step was undefined');
|
||||
}
|
||||
let logs;
|
||||
try {
|
||||
downloadInProgress.value = true;
|
||||
logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.id);
|
||||
} catch (e) {
|
||||
notifications.notifyError(e, i18n.t('repo.pipeline.log_download_error'));
|
||||
return;
|
||||
} finally {
|
||||
downloadInProgress.value = false;
|
||||
}
|
||||
const fileURL = window.URL.createObjectURL(
|
||||
new Blob([logs.map((line) => atob(line.data)).join('')], {
|
||||
type: 'text/plain',
|
||||
}),
|
||||
);
|
||||
const fileLink = document.createElement('a');
|
||||
|
||||
fileLink.href = fileURL;
|
||||
fileLink.setAttribute(
|
||||
'download',
|
||||
`${repo.value.owner}-${repo.value.name}-${pipeline.value.number}-${step.value.name}.log`,
|
||||
);
|
||||
document.body.appendChild(fileLink);
|
||||
|
||||
fileLink.click();
|
||||
document.body.removeChild(fileLink);
|
||||
window.URL.revokeObjectURL(fileURL);
|
||||
}
|
||||
|
||||
async function loadLogs() {
|
||||
if (loadedStepSlug.value === stepSlug.value) {
|
||||
return;
|
||||
}
|
||||
loadedStepSlug.value = stepSlug.value;
|
||||
log.value = undefined;
|
||||
logBuffer.value = [];
|
||||
ansiUp.value = new AnsiUp();
|
||||
ansiUp.value.use_classes = true;
|
||||
|
||||
if (!repo) {
|
||||
throw new Error('Unexpected: "repo" should be provided at this place');
|
||||
}
|
||||
|
||||
if (stream.value) {
|
||||
stream.value.close();
|
||||
}
|
||||
|
||||
if (!hasLogs.value || !step.value) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (isStepFinished(step.value)) {
|
||||
const logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.id);
|
||||
logs?.forEach((line) => writeLog({ index: line.line, text: atob(line.data), time: line.time }));
|
||||
flushLogs(false);
|
||||
}
|
||||
|
||||
if (isStepRunning(step.value)) {
|
||||
stream.value = apiClient.streamLogs(
|
||||
repo.value.owner,
|
||||
repo.value.name,
|
||||
pipeline.value.number,
|
||||
step.value.id,
|
||||
(line) => {
|
||||
writeLog({ index: line.line, text: atob(line.data), time: line.time });
|
||||
flushLogs(true);
|
||||
},
|
||||
);
|
||||
const step = computed(() => pipeline.value && findStep(pipeline.value.steps || [], stepId.value));
|
||||
const stream = ref<EventSource>();
|
||||
const log = ref<LogLine[]>();
|
||||
const consoleElement = ref<Element>();
|
||||
}
|
||||
}
|
||||
|
||||
const loadedLogs = computed(() => !!log.value);
|
||||
const hasLogs = computed(
|
||||
() =>
|
||||
// we do not have logs for skipped steps
|
||||
repo?.value && pipeline.value && step.value && step.value.state !== 'skipped' && step.value.state !== 'killed',
|
||||
);
|
||||
const autoScroll = useStorage('log-auto-scroll', false);
|
||||
const showActions = ref(false);
|
||||
const downloadInProgress = ref(false);
|
||||
const ansiUp = ref(new AnsiUp());
|
||||
ansiUp.value.use_classes = true;
|
||||
const logBuffer = ref<LogLine[]>([]);
|
||||
onMounted(async () => {
|
||||
loadLogs();
|
||||
});
|
||||
|
||||
const maxLineCount = 500; // TODO: think about way to support lazy-loading more than last 300 logs (#776)
|
||||
watch(stepSlug, () => {
|
||||
loadLogs();
|
||||
});
|
||||
|
||||
function formatTime(time?: number): string {
|
||||
return time === undefined ? '' : `${time}s`;
|
||||
watch(step, (oldStep, newStep) => {
|
||||
if (oldStep && oldStep.name === newStep?.name && oldStep?.end_time !== newStep?.end_time) {
|
||||
if (autoScroll.value) {
|
||||
scrollDown();
|
||||
}
|
||||
|
||||
function writeLog(line: LogLine) {
|
||||
logBuffer.value.push({
|
||||
        index: line.index ?? 0,
        text: ansiUp.value.ansi_to_html(line.text),
        time: line.time ?? 0,
      });
    }

    function scrollDown() {
      nextTick(() => {
        if (!consoleElement.value) {
          return;
        }
        consoleElement.value.scrollTop = consoleElement.value.scrollHeight;
      });
    }

    const flushLogs = debounce((scroll: boolean) => {
      let buffer = logBuffer.value.slice(-maxLineCount);
      logBuffer.value = [];

      if (buffer.length === 0) {
        if (!log.value) {
          log.value = [];
        }
        return;
      }

      // append old logs lines
      if (buffer.length < maxLineCount && log.value) {
        buffer = [...log.value.slice(-(maxLineCount - buffer.length)), ...buffer];
      }

      // deduplicate repeating times
      buffer = buffer.reduce(
        (acc, line) => ({
          lastTime: line.time ?? 0,
          lines: [
            ...acc.lines,
            {
              ...line,
              time: acc.lastTime === line.time ? undefined : line.time,
            },
          ],
        }),
        { lastTime: -1, lines: [] as LogLine[] },
      ).lines;

      log.value = buffer;

      if (scroll && autoScroll.value) {
        scrollDown();
      }
    }, 500);

    async function download() {
      if (!repo?.value || !pipeline.value || !step.value) {
        throw new Error('The repository, pipeline or step was undefined');
      }

      let logs;
      try {
        downloadInProgress.value = true;
        logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.pid);
      } catch (e) {
        notifications.notifyError(e, i18n.t('repo.pipeline.log_download_error'));
        return;
      } finally {
        downloadInProgress.value = false;
      }
      const fileURL = window.URL.createObjectURL(
        new Blob([logs.map((line) => line.out).join('')], {
          type: 'text/plain',
        }),
      );
      const fileLink = document.createElement('a');

      fileLink.href = fileURL;
      fileLink.setAttribute(
        'download',
        `${repo.value.owner}-${repo.value.name}-${pipeline.value.number}-${step.value.name}.log`,
      );
      document.body.appendChild(fileLink);

      fileLink.click();
      document.body.removeChild(fileLink);
      window.URL.revokeObjectURL(fileURL);
    }

    async function loadLogs() {
      if (loadedStepSlug.value === stepSlug.value) {
        return;
      }
      loadedStepSlug.value = stepSlug.value;
      log.value = undefined;
      logBuffer.value = [];
      ansiUp.value = new AnsiUp();
      ansiUp.value.use_classes = true;

      if (!repo) {
        throw new Error('Unexpected: "repo" should be provided at this place');
      }

      if (stream.value) {
        stream.value.close();
      }

      if (!hasLogs.value || !step.value) {
        return;
      }

      if (isStepFinished(step.value)) {
        const logs = await apiClient.getLogs(repo.value.owner, repo.value.name, pipeline.value.number, step.value.pid);
        logs?.forEach((line) => writeLog({ index: line.pos, text: line.out, time: line.time }));
        flushLogs(false);
      }

      if (isStepRunning(step.value)) {
        // load stream of parent process (which receives all child processes logs)
        // TODO: change stream to only send data of single child process
        stream.value = apiClient.streamLogs(
          repo.value.owner,
          repo.value.name,
          pipeline.value.number,
          step.value.ppid,
          (line) => {
            if (line?.step !== step.value?.name) {
              return;
            }
            writeLog({ index: line.pos, text: line.out, time: line.time });
            flushLogs(true);
          },
        );
      }
    }

    onMounted(async () => {
      loadLogs();
    });

    watch(stepSlug, () => {
      loadLogs();
    });

    watch(step, (oldStep, newStep) => {
      if (oldStep && oldStep.name === newStep?.name && oldStep?.end_time !== newStep?.end_time) {
        if (autoScroll.value) {
          scrollDown();
        }
      }
    });

    return {
      consoleElement,
      step,
      log,
      loadedLogs,
      hasLogs,
      formatTime,
      showActions,
      download,
      downloadInProgress,
      autoScroll,
    };
  },
  }
});
</script>
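For reference, a standalone sketch of the "deduplicate repeating times" reduce used in `flushLogs` above; the `LogLine` shape is assumed here and may differ slightly from the component's real type:

```ts
// Standalone sketch of the timestamp deduplication performed in flushLogs.
// LogLine is an assumed shape, not the component's exact type.
type LogLine = { index: number; text: string; time?: number };

function dedupeTimes(buffer: LogLine[]): LogLine[] {
  return buffer.reduce(
    (acc, line) => ({
      lastTime: line.time ?? 0,
      lines: [...acc.lines, { ...line, time: acc.lastTime === line.time ? undefined : line.time }],
    }),
    { lastTime: -1, lines: [] as LogLine[] },
  ).lines;
}

// Lines within the same second keep the timestamp only on the first one.
console.log(dedupeTimes([
  { index: 0, text: 'a', time: 3 },
  { index: 1, text: 'b', time: 3 },
  { index: 2, text: 'c', time: 4 },
]));
// -> times: 3, undefined, 4
```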
@ -129,8 +129,8 @@ export default class WoodpeckerClient extends ApiClient {
    return this._post(`/api/repos/${owner}/${repo}/pipelines/${pipeline}?${query}`) as Promise<Pipeline>;
  }

  getLogs(owner: string, repo: string, pipeline: number, step: number): Promise<PipelineLog[]> {
    return this._get(`/api/repos/${owner}/${repo}/logs/${pipeline}/${step}`) as Promise<PipelineLog[]>;
  getLogs(owner: string, repo: string, pipeline: number, stepId: number): Promise<PipelineLog[]> {
    return this._get(`/api/repos/${owner}/${repo}/logs/${pipeline}/${stepId}`) as Promise<PipelineLog[]>;
  }

  getSecretList(owner: string, repo: string, page: number): Promise<Secret[] | null> {
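The per-step logs route used by `getLogs` above can also be called directly; a minimal sketch with plain `fetch`, assuming the `PipelineLog` shape introduced later in this diff and made-up repository coordinates:

```ts
// Sketch only: calling the per-step logs route directly with fetch.
// The PipelineLog shape matches the new web type further down in this diff;
// the owner/repo/pipeline/step values below are made up.
type PipelineLog = { id: number; step_id: number; time: number; line: number; data: string; type: number };

async function fetchStepLogs(owner: string, repo: string, pipeline: number, stepId: number): Promise<PipelineLog[]> {
  const res = await fetch(`/api/repos/${owner}/${repo}/logs/${pipeline}/${stepId}`);
  if (!res.ok) {
    throw new Error(`unexpected status ${res.status}`);
  }
  return (await res.json()) as PipelineLog[];
}

// Usage: print each line after decoding the base64 payload.
const entries = await fetchStepLogs('octocat', 'hello-world', 42, 7);
entries.forEach((entry) => console.log(entry.line, atob(entry.data)));
```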
@ -300,7 +300,7 @@ export default class WoodpeckerClient extends ApiClient {
    // eslint-disable-next-line promise/prefer-await-to-callbacks
    callback: (data: PipelineLog) => void,
  ): EventSource {
    return this._subscribe(`/stream/logs/${owner}/${repo}/${pipeline}/${step}`, callback, {
    return this._subscribe(`/api/stream/logs/${owner}/${repo}/${pipeline}/${step}`, callback, {
      reconnect: true,
    });
  }
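A minimal sketch of consuming the corrected `/api/stream/logs/...` route with a bare `EventSource`; that each event carries one JSON-encoded `PipelineLog` is an assumption here, and the coordinates are example values:

```ts
// Sketch only: subscribing to the log stream with a plain EventSource.
// Assumption: each SSE message body is one JSON-encoded PipelineLog entry.
const source = new EventSource('/api/stream/logs/octocat/hello-world/42/7');

source.onmessage = (event) => {
  const entry = JSON.parse(event.data);
  console.log(entry.line, atob(entry.data)); // data is base64 encoded
};

// EventSource reconnects on its own; call source.close() once the step has finished.
```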
@ -102,6 +102,7 @@ export type PipelineStatus =

export type PipelineStep = {
  id: number;
  uuid: string;
  pipeline_id: number;
  pid: number;
  ppid: number;

@ -118,10 +119,12 @@ export type PipelineStep = {
};

export type PipelineLog = {
  step: string;
  pos: number;
  out: string;
  time?: number;
  id: number;
  step_id: number;
  time: number;
  line: number;
  data: string; // base64 encoded
  type: number;
};

export type PipelineFeed = Pipeline & {
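To make the field renames concrete, an illustrative adapter between the old and new `PipelineLog` shapes shown above; the helper itself is not part of this change:

```ts
// Both shapes come from the type change above; the adapter is only an
// illustration of how the fields map, not code from this pull request.
type OldPipelineLog = { step: string; pos: number; out: string; time?: number };
type NewPipelineLog = { id: number; step_id: number; time: number; line: number; data: string; type: number };

function toLegacyShape(entry: NewPipelineLog, stepName: string): OldPipelineLog {
  return {
    step: stepName,        // entries now reference step_id, so the name must be supplied separately
    pos: entry.line,       // `pos` is now `line`
    out: atob(entry.data), // `out` is now base64-encoded `data`
    time: entry.time,
  };
}
```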
@ -286,9 +286,9 @@ func (c *client) PipelineKill(owner, name string, num int) error {
}

// PipelineLogs returns the pipeline logs for the specified step.
func (c *client) PipelineLogs(owner, name string, num, step int) ([]*Logs, error) {
func (c *client) StepLogEntries(owner, name string, num, step int) ([]*LogEntry, error) {
	uri := fmt.Sprintf(pathLogs, c.addr, owner, name, num, step)
	var out []*Logs
	var out []*LogEntry
	err := c.get(uri, &out)
	return out, err
}
@ -33,3 +33,14 @@ const (
	StatusKilled = "killed"
	StatusError  = "error"
)

// LogEntryType identifies the type of line in the logs.
type LogEntryType int

const (
	LogEntryStdout LogEntryType = iota
	LogEntryStderr
	LogEntryExitCode
	LogEntryMetadata
	LogEntryProgress
)
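A TypeScript mirror of the `LogEntryType` constants above, relying on their `iota` order (0 = stdout through 4 = progress); the enum and helper are illustrative, not part of this change:

```ts
// Mirrors the Go LogEntryType constants above (iota order 0..4).
// Shown only to make the numeric `type` field readable on the web side.
enum LogEntryType {
  Stdout = 0,
  Stderr = 1,
  ExitCode = 2,
  Metadata = 3,
  Progress = 4,
}

function isConsoleOutput(type: number): boolean {
  return type === LogEntryType.Stdout || type === LogEntryType.Stderr;
}
```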
@ -105,8 +105,8 @@ type Client interface {
	// PipelineKill force kills the running pipeline.
	PipelineKill(string, string, int) error

	// PipelineLogs returns the logs for the given pipeline
	PipelineLogs(string, string, int, int) ([]*Logs, error)
	// StepLogEntries returns the LogEntries for the given pipeline step
	StepLogEntries(string, string, int, int) ([]*LogEntry, error)

	// Deploy triggers a deployment for an existing pipeline using the specified
	// target environment.
@ -173,10 +173,14 @@ type (
		Level string `json:"log-level"`
	}

	// Logs is the JSON data for a logs response
	Logs struct {
		Step   string `json:"step"`
		Output string `json:"out"`
	// LogEntry is a single log entry
	LogEntry struct {
		ID     int64        `json:"id"`
		StepID int64        `json:"step_id"`
		Time   int64        `json:"time"`
		Line   int          `json:"line"`
		Data   []byte       `json:"data"`
		Type   LogEntryType `json:"type"`
	}

	// Cron is the JSON data of a cron job
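Because `Data` is a `[]byte`, Go's JSON encoder emits it base64 encoded, which is why the web type above carries `data: string; // base64 encoded`. An illustrative sketch of turning such entries back into downloadable plain text on the client; the type alias and sample values are made up:

```ts
// Illustration only: assembling downloadable plain text from new-style entries,
// roughly what a client-side download needs once logs arrive in this shape.
type PipelineLog = { id: number; step_id: number; time: number; line: number; data: string; type: number };

function logsToText(entries: PipelineLog[]): string {
  return [...entries]
    .sort((a, b) => a.line - b.line)
    .map((entry) => atob(entry.data)) // `data` is base64 encoded
    .join('');
}

const sample: PipelineLog[] = [
  { id: 1, step_id: 7, time: 0, line: 0, data: btoa('step started\n'), type: 0 },
  { id: 2, step_id: 7, time: 1, line: 1, data: btoa('done\n'), type: 0 },
];
const blob = new Blob([logsToText(sample)], { type: 'text/plain' });
console.log(blob.size);
```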