Unify DB tables/columns (#3806)
Co-authored-by: Anbraten <6918444+anbraten@users.noreply.github.com>
parent b8b6efb352
commit 92cd0d04a3
41 changed files with 1005 additions and 260 deletions
Makefile (2 changes)

@@ -173,7 +173,7 @@ test-cli: ## Test cli code

test-server-datastore: ## Test server datastore
go test -timeout 120s -tags 'test $(TAGS)' -run TestMigrate go.woodpecker-ci.org/woodpecker/v2/server/store/...
- go test -race -timeout 30s -tags 'test $(TAGS)' -skip TestMigrate go.woodpecker-ci.org/woodpecker/v2/server/store/...
+ go test -race -timeout 45s -tags 'test $(TAGS)' -skip TestMigrate go.woodpecker-ci.org/woodpecker/v2/server/store/...

test-server-datastore-coverage: ## Test server datastore with coverage report
go test -race -cover -coverprofile datastore-coverage.out -timeout 180s -tags 'test $(TAGS)' go.woodpecker-ci.org/woodpecker/v2/server/store/...
@@ -4014,6 +4014,7 @@ const docTemplate = `{
"type": "string"
},
"created_at": {
+ "description": "TODO change JSON field to \"created\" in 3.0",
"type": "integer"
},
"creator_id": {

@@ -4056,12 +4057,14 @@ const docTemplate = `{
"type": "string"
},
"created_at": {
+ "description": "TODO change JSON field to \"created\" in 3.0",
"type": "integer"
},
"event": {
"type": "string"
},
"finished_at": {
+ "description": "TODO change JSON field to \"finished\" in 3.0",
"type": "integer"
},
"id": {

@@ -4083,6 +4086,7 @@ const docTemplate = `{
"type": "integer"
},
"started_at": {
+ "description": "TODO change JSON field to \"started\" in 3.0",
"type": "integer"
},
"status": {

@@ -4240,6 +4244,7 @@ const docTemplate = `{
"type": "string"
},
"created_at": {
+ "description": "TODO change JSON field to \"created\" in 3.0",
"type": "integer"
},
"deploy_task": {

@@ -4258,6 +4263,7 @@ const docTemplate = `{
"$ref": "#/definitions/WebhookEvent"
},
"finished_at": {
+ "description": "TODO change JSON field to \"finished\" in 3.0",
"type": "integer"
},
"forge_url": {

@@ -4291,6 +4297,7 @@ const docTemplate = `{
"type": "string"
},
"reviewed_at": {
+ "description": "TODO change JSON field to \"reviewed\" in 3.0",
"type": "integer"
},
"reviewed_by": {

@@ -4301,6 +4308,7 @@ const docTemplate = `{
"type": "string"
},
"started_at": {
+ "description": "TODO change JSON field to \"started\" in 3.0",
"type": "integer"
},
"status": {

@@ -4313,6 +4321,7 @@ const docTemplate = `{
"type": "string"
},
"updated_at": {
+ "description": "TODO change JSON field to \"updated\" in 3.0",
"type": "integer"
},
"variables": {
@@ -36,4 +36,4 @@ The following list contains some tools and frameworks used by the Woodpecker UI.

Woodpecker uses [Vue I18n](https://vue-i18n.intlify.dev/) as translation library. New translations have to be added to `web/src/assets/locales/en.json`. The English source file will be automatically imported into [Weblate](https://translate.woodpecker-ci.org/) (the translation system used by Woodpecker) where all other languages will be translated by the community based on the English source.
You must not provide translations except English in PRs, otherwise weblate could put git into conflicts (when someone has translated in that language file and changes are not into main branch yet)

- For more information about translations see [Translations](./07-translations.md).
+ For more information about translations see [Translations](./08-translations.md).
docs/docs/92-development/06-conventions.md (new file, 7 lines)

@@ -0,0 +1,7 @@
# Conventions

## Database naming

Database tables are named plural, columns don't have any prefix.

Example: Table name `agent`, columns `id`, `name`.
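For illustration, a model following this convention pairs a plural TableName() with unprefixed column names in its xorm tags (a minimal sketch; the fields of this Agent example are illustrative, not taken from this diff):

package model

// Agent is an illustrative example only: plural table name, unprefixed columns.
type Agent struct {
	ID   int64  `json:"id"   xorm:"pk autoincr 'id'"`
	Name string `json:"name" xorm:"name"`
}

// TableName returns the database table name for xorm.
func (Agent) TableName() string {
	return "agents"
}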
@@ -17,15 +17,23 @@ package model

// Config represents a pipeline configuration.
type Config struct {
- ID int64 `json:"-" xorm:"pk autoincr 'config_id'"`
+ ID int64 `json:"-" xorm:"pk autoincr 'id'"`
- RepoID int64 `json:"-" xorm:"UNIQUE(s) 'config_repo_id'"`
+ RepoID int64 `json:"-" xorm:"UNIQUE(s) 'repo_id'"`
- Hash string `json:"hash" xorm:"UNIQUE(s) 'config_hash'"`
+ Hash string `json:"hash" xorm:"UNIQUE(s) 'hash'"`
- Name string `json:"name" xorm:"UNIQUE(s) 'config_name'"`
+ Name string `json:"name" xorm:"UNIQUE(s) 'name'"`
- Data []byte `json:"data" xorm:"LONGBLOB 'config_data'"`
+ Data []byte `json:"data" xorm:"LONGBLOB 'data'"`
} // @name Config

+ func (Config) TableName() string {
+ return "configs"
+ }

// PipelineConfig is the n:n relation between Pipeline and Config.
type PipelineConfig struct {
ConfigID int64 `json:"-" xorm:"UNIQUE(s) NOT NULL 'config_id'"`
PipelineID int64 `json:"-" xorm:"UNIQUE(s) NOT NULL 'pipeline_id'"`
}

+ func (PipelineConfig) TableName() string {
+ return "pipeline_configs"
+ }
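The renamed tags plus the new TableName() methods are what xorm uses to derive the schema. A minimal, hedged sketch of how that mapping is exercised (the sqlite3 engine setup here is illustrative, not how Woodpecker wires its datastore):

package main

import (
	_ "github.com/mattn/go-sqlite3" // illustrative driver choice
	"xorm.io/xorm"

	"go.woodpecker-ci.org/woodpecker/v2/server/model"
)

func main() {
	engine, err := xorm.NewEngine("sqlite3", "example.db")
	if err != nil {
		panic(err)
	}
	// Sync creates/updates the "configs" table (from Config.TableName()) with
	// the unprefixed columns id, repo_id, hash, name, data (from the struct tags).
	if err := engine.Sync(new(model.Config)); err != nil {
		panic(err)
	}
}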
@@ -21,14 +21,14 @@ import (
)

type Cron struct {
- ID int64 `json:"id" xorm:"pk autoincr"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- Name string `json:"name" xorm:"UNIQUE(s) INDEX"`
+ Name string `json:"name" xorm:"name UNIQUE(s) INDEX"`
RepoID int64 `json:"repo_id" xorm:"repo_id UNIQUE(s) INDEX"`
CreatorID int64 `json:"creator_id" xorm:"creator_id INDEX"`
- NextExec int64 `json:"next_exec"`
+ NextExec int64 `json:"next_exec" xorm:"next_exec"`
- Schedule string `json:"schedule" xorm:"NOT NULL"` // @weekly, 3min, ...
+ Schedule string `json:"schedule" xorm:"schedule NOT NULL"` // @weekly, 3min, ...
- Created int64 `json:"created_at" xorm:"created NOT NULL DEFAULT 0"`
+ Created int64 `json:"created_at" xorm:"created NOT NULL DEFAULT 0"` // TODO change JSON field to "created" in 3.0
- Branch string `json:"branch"`
+ Branch string `json:"branch" xorm:"branch"`
} // @name Cron

// TableName returns the database table name for xorm.
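A note on the tag syntax used above (hedged, based on xorm's general tag handling rather than anything stated in this diff): the column name may be given either quoted, as in 'id', or as a bare leading token, as in repo_id; both forms coexist with keywords such as pk, autoincr, NOT NULL, UNIQUE(s) and INDEX. Illustrative only:

// Illustrative only; not part of the diff.
type tagExample struct {
	A int64  `xorm:"pk autoincr 'id'"`     // quoted column name
	B string `xorm:"name UNIQUE(s) INDEX"` // bare leading token as column name
}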
@@ -17,21 +17,21 @@ package model

// Feed represents an item in the user's feed or timeline.
type Feed struct {
- RepoID int64 `json:"repo_id" xorm:"feed_repo_id"`
+ RepoID int64 `json:"repo_id" xorm:"repo_id"`
- ID int64 `json:"id,omitempty" xorm:"feed_pipeline_id"`
+ ID int64 `json:"id,omitempty" xorm:"pipeline_id"`
- Number int64 `json:"number,omitempty" xorm:"feed_pipeline_number"`
+ Number int64 `json:"number,omitempty" xorm:"pipeline_number"`
- Event string `json:"event,omitempty" xorm:"feed_pipeline_event"`
+ Event string `json:"event,omitempty" xorm:"pipeline_event"`
- Status string `json:"status,omitempty" xorm:"feed_pipeline_status"`
+ Status string `json:"status,omitempty" xorm:"pipeline_status"`
- Created int64 `json:"created_at,omitempty" xorm:"feed_pipeline_created"`
+ Created int64 `json:"created_at,omitempty" xorm:"pipeline_created"` // TODO change JSON field to "created" in 3.0
- Started int64 `json:"started_at,omitempty" xorm:"feed_pipeline_started"`
+ Started int64 `json:"started_at,omitempty" xorm:"pipeline_started"` // TODO change JSON field to "started" in 3.0
- Finished int64 `json:"finished_at,omitempty" xorm:"feed_pipeline_finished"`
+ Finished int64 `json:"finished_at,omitempty" xorm:"pipeline_finished"` // TODO change JSON field to "finished" in 3.0
- Commit string `json:"commit,omitempty" xorm:"feed_pipeline_commit"`
+ Commit string `json:"commit,omitempty" xorm:"pipeline_commit"`
- Branch string `json:"branch,omitempty" xorm:"feed_pipeline_branch"`
+ Branch string `json:"branch,omitempty" xorm:"pipeline_branch"`
- Ref string `json:"ref,omitempty" xorm:"feed_pipeline_ref"`
+ Ref string `json:"ref,omitempty" xorm:"pipeline_ref"`
- Refspec string `json:"refspec,omitempty" xorm:"feed_pipeline_refspec"`
+ Refspec string `json:"refspec,omitempty" xorm:"pipeline_refspec"`
- Title string `json:"title,omitempty" xorm:"feed_pipeline_title"`
+ Title string `json:"title,omitempty" xorm:"pipeline_title"`
- Message string `json:"message,omitempty" xorm:"feed_pipeline_message"`
+ Message string `json:"message,omitempty" xorm:"pipeline_message"`
- Author string `json:"author,omitempty" xorm:"feed_pipeline_author"`
+ Author string `json:"author,omitempty" xorm:"pipeline_author"`
- Avatar string `json:"author_avatar,omitempty" xorm:"feed_pipeline_avatar"`
+ Avatar string `json:"author_avatar,omitempty" xorm:"pipeline_avatar"`
- Email string `json:"author_email,omitempty" xorm:"feed_pipeline_email"`
+ Email string `json:"author_email,omitempty" xorm:"pipeline_email"`
} // @name Feed
@@ -37,6 +37,11 @@ type Forge struct {
AdditionalOptions map[string]any `json:"additional_options,omitempty" xorm:"json"`
} // @name Forge

+ // TableName returns the database table name for xorm.
+ func (Forge) TableName() string {
+ return "forges"
+ }

// PublicCopy returns a copy of the forge without sensitive information and technical details.
func (f *Forge) PublicCopy() *Forge {
forge := &Forge{
@@ -28,11 +28,11 @@ const (
type LogEntry struct {
ID int64 `json:"id" xorm:"pk autoincr 'id'"`
StepID int64 `json:"step_id" xorm:"INDEX 'step_id'"`
- Time int64 `json:"time"`
+ Time int64 `json:"time" xorm:"'time'"`
- Line int `json:"line"`
+ Line int `json:"line" xorm:"'line'"`
Data []byte `json:"data" xorm:"LONGBLOB"`
Created int64 `json:"-" xorm:"created"`
- Type LogEntryType `json:"type"`
+ Type LogEntryType `json:"type" xorm:"'type'"`
} // @name LogEntry

// TODO: store info what specific command the line belongs to (must be optional and impl. by backend)
@@ -17,13 +17,13 @@ package model

// Perm defines a repository permission for an individual user.
type Perm struct {
- UserID int64 `json:"-" xorm:"UNIQUE(s) INDEX NOT NULL 'perm_user_id'"`
+ UserID int64 `json:"-" xorm:"UNIQUE(s) INDEX NOT NULL 'user_id'"`
- RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX NOT NULL 'perm_repo_id'"`
+ RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX NOT NULL 'repo_id'"`
Repo *Repo `json:"-" xorm:"-"`
- Pull bool `json:"pull" xorm:"perm_pull"`
+ Pull bool `json:"pull" xorm:"pull"`
- Push bool `json:"push" xorm:"perm_push"`
+ Push bool `json:"push" xorm:"push"`
- Admin bool `json:"admin" xorm:"perm_admin"`
+ Admin bool `json:"admin" xorm:"admin"`
- Synced int64 `json:"synced" xorm:"perm_synced"`
+ Synced int64 `json:"synced" xorm:"synced"`
Created int64 `json:"created" xorm:"created"`
Updated int64 `json:"updated" xorm:"updated"`
} // @name Perm
@@ -20,50 +20,50 @@ import (
)

type Pipeline struct {
- ID int64 `json:"id" xorm:"pk autoincr 'pipeline_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'pipeline_repo_id'"`
+ RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'repo_id'"`
- Number int64 `json:"number" xorm:"UNIQUE(s) 'pipeline_number'"`
+ Number int64 `json:"number" xorm:"UNIQUE(s) 'number'"`
- Author string `json:"author" xorm:"INDEX 'pipeline_author'"`
+ Author string `json:"author" xorm:"INDEX 'author'"`
- Parent int64 `json:"parent" xorm:"pipeline_parent"`
+ Parent int64 `json:"parent" xorm:"parent"`
- Event WebhookEvent `json:"event" xorm:"pipeline_event"`
+ Event WebhookEvent `json:"event" xorm:"event"`
- Status StatusValue `json:"status" xorm:"INDEX 'pipeline_status'"`
+ Status StatusValue `json:"status" xorm:"INDEX 'status'"`
- Errors []*types.PipelineError `json:"errors" xorm:"json 'pipeline_errors'"`
+ Errors []*types.PipelineError `json:"errors" xorm:"json 'errors'"`
- Created int64 `json:"created_at" xorm:"pipeline_created"`
+ Created int64 `json:"created_at" xorm:"'created' NOT NULL DEFAULT 0 created"` // TODO change JSON field to "created" in 3.0
- Updated int64 `json:"updated_at" xorm:"updated NOT NULL DEFAULT 0 'updated'"`
+ Updated int64 `json:"updated_at" xorm:"'updated' NOT NULL DEFAULT 0 updated"` // TODO change JSON field to "updated" in 3.0
- Started int64 `json:"started_at" xorm:"pipeline_started"`
+ Started int64 `json:"started_at" xorm:"started"` // TODO change JSON field to "started" in 3.0
- Finished int64 `json:"finished_at" xorm:"pipeline_finished"`
+ Finished int64 `json:"finished_at" xorm:"finished"` // TODO change JSON field to "finished" in 3.0
- Deploy string `json:"deploy_to" xorm:"pipeline_deploy"`
+ Deploy string `json:"deploy_to" xorm:"deploy"`
- DeployTask string `json:"deploy_task" xorm:"pipeline_deploy_task"`
+ DeployTask string `json:"deploy_task" xorm:"deploy_task"`
- Commit string `json:"commit" xorm:"pipeline_commit"`
+ Commit string `json:"commit" xorm:"commit"`
- Branch string `json:"branch" xorm:"pipeline_branch"`
+ Branch string `json:"branch" xorm:"branch"`
- Ref string `json:"ref" xorm:"pipeline_ref"`
+ Ref string `json:"ref" xorm:"ref"`
- Refspec string `json:"refspec" xorm:"pipeline_refspec"`
+ Refspec string `json:"refspec" xorm:"refspec"`
- Title string `json:"title" xorm:"pipeline_title"`
+ Title string `json:"title" xorm:"title"`
- Message string `json:"message" xorm:"TEXT 'pipeline_message'"`
+ Message string `json:"message" xorm:"TEXT 'message'"`
- Timestamp int64 `json:"timestamp" xorm:"pipeline_timestamp"`
+ Timestamp int64 `json:"timestamp" xorm:"'timestamp'"`
- Sender string `json:"sender" xorm:"pipeline_sender"` // uses reported user for webhooks and name of cron for cron pipelines
+ Sender string `json:"sender" xorm:"sender"` // uses reported user for webhooks and name of cron for cron pipelines
- Avatar string `json:"author_avatar" xorm:"pipeline_avatar"`
+ Avatar string `json:"author_avatar" xorm:"avatar"`
- Email string `json:"author_email" xorm:"pipeline_email"`
+ Email string `json:"author_email" xorm:"email"`
- ForgeURL string `json:"forge_url" xorm:"pipeline_forge_url"`
+ ForgeURL string `json:"forge_url" xorm:"forge_url"`
- Reviewer string `json:"reviewed_by" xorm:"pipeline_reviewer"`
+ Reviewer string `json:"reviewed_by" xorm:"reviewer"`
- Reviewed int64 `json:"reviewed_at" xorm:"pipeline_reviewed"`
+ Reviewed int64 `json:"reviewed_at" xorm:"reviewed"` // TODO change JSON field to "reviewed" in 3.0
Workflows []*Workflow `json:"workflows,omitempty" xorm:"-"`
ChangedFiles []string `json:"changed_files,omitempty" xorm:"LONGTEXT 'changed_files'"`
AdditionalVariables map[string]string `json:"variables,omitempty" xorm:"json 'additional_variables'"`
PullRequestLabels []string `json:"pr_labels,omitempty" xorm:"json 'pr_labels'"`
IsPrerelease bool `json:"is_prerelease,omitempty" xorm:"is_prerelease"`
} // @name Pipeline

- type PipelineFilter struct {
- Before int64
- After int64
- }

// TableName return database table name for xorm.
func (Pipeline) TableName() string {
return "pipelines"
}

+ type PipelineFilter struct {
+ Before int64
+ After int64
+ }

// IsMultiPipeline checks if step list contain more than one parent step.
func (p Pipeline) IsMultiPipeline() bool {
return len(p.Workflows) > 1
@@ -15,7 +15,7 @@
package model

type Redirection struct {
- ID int64 `xorm:"pk autoincr 'redirection_id'"`
+ ID int64 `xorm:"pk autoincr 'id'"`
RepoID int64 `xorm:"'repo_id'"`
FullName string `xorm:"UNIQUE INDEX 'repo_full_name'"`
}
@@ -28,13 +28,17 @@ var (

// Registry represents a docker registry with credentials.
type Registry struct {
- ID int64 `json:"id" xorm:"pk autoincr 'registry_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'registry_repo_id'"`
+ RepoID int64 `json:"-" xorm:"UNIQUE(s) INDEX 'repo_id'"`
- Address string `json:"address" xorm:"UNIQUE(s) INDEX 'registry_addr'"`
+ Address string `json:"address" xorm:"UNIQUE(s) INDEX 'address'"`
- Username string `json:"username" xorm:"varchar(2000) 'registry_username'"`
+ Username string `json:"username" xorm:"varchar(2000) 'username'"`
- Password string `json:"password" xorm:"TEXT 'registry_password'"`
+ Password string `json:"password" xorm:"TEXT 'password'"`
} // @name Registry

+ func (r Registry) TableName() string {
+ return "registries"
+ }

// Validate validates the registry information.
func (r *Registry) Validate() error {
switch {
@@ -22,32 +22,32 @@ import (

// Repo represents a repository.
type Repo struct {
- ID int64 `json:"id,omitempty" xorm:"pk autoincr 'repo_id'"`
+ ID int64 `json:"id,omitempty" xorm:"pk autoincr 'id'"`
- UserID int64 `json:"-" xorm:"repo_user_id"`
+ UserID int64 `json:"-" xorm:"user_id"`
ForgeID int64 `json:"forge_id,omitempty" xorm:"forge_id"`
// ForgeRemoteID is the unique identifier for the repository on the forge.
ForgeRemoteID ForgeRemoteID `json:"forge_remote_id" xorm:"forge_remote_id"`
- OrgID int64 `json:"org_id" xorm:"repo_org_id"`
+ OrgID int64 `json:"org_id" xorm:"org_id"`
- Owner string `json:"owner" xorm:"UNIQUE(name) 'repo_owner'"`
+ Owner string `json:"owner" xorm:"UNIQUE(name) 'owner'"`
- Name string `json:"name" xorm:"UNIQUE(name) 'repo_name'"`
+ Name string `json:"name" xorm:"UNIQUE(name) 'name'"`
- FullName string `json:"full_name" xorm:"UNIQUE 'repo_full_name'"`
+ FullName string `json:"full_name" xorm:"UNIQUE 'full_name'"`
- Avatar string `json:"avatar_url,omitempty" xorm:"varchar(500) 'repo_avatar'"`
+ Avatar string `json:"avatar_url,omitempty" xorm:"varchar(500) 'avatar'"`
- ForgeURL string `json:"forge_url,omitempty" xorm:"varchar(1000) 'repo_forge_url'"`
+ ForgeURL string `json:"forge_url,omitempty" xorm:"varchar(1000) 'forge_url'"`
- Clone string `json:"clone_url,omitempty" xorm:"varchar(1000) 'repo_clone'"`
+ Clone string `json:"clone_url,omitempty" xorm:"varchar(1000) 'clone'"`
- CloneSSH string `json:"clone_url_ssh" xorm:"varchar(1000) 'repo_clone_ssh'"`
+ CloneSSH string `json:"clone_url_ssh" xorm:"varchar(1000) 'clone_ssh'"`
- Branch string `json:"default_branch,omitempty" xorm:"varchar(500) 'repo_branch'"`
+ Branch string `json:"default_branch,omitempty" xorm:"varchar(500) 'branch'"`
- SCMKind SCMKind `json:"scm,omitempty" xorm:"varchar(50) 'repo_scm'"`
+ SCMKind SCMKind `json:"scm,omitempty" xorm:"varchar(50) 'scm'"`
- PREnabled bool `json:"pr_enabled" xorm:"DEFAULT TRUE 'repo_pr_enabled'"`
+ PREnabled bool `json:"pr_enabled" xorm:"DEFAULT TRUE 'pr_enabled'"`
- Timeout int64 `json:"timeout,omitempty" xorm:"repo_timeout"`
+ Timeout int64 `json:"timeout,omitempty" xorm:"timeout"`
- Visibility RepoVisibility `json:"visibility" xorm:"varchar(10) 'repo_visibility'"`
+ Visibility RepoVisibility `json:"visibility" xorm:"varchar(10) 'visibility'"`
- IsSCMPrivate bool `json:"private" xorm:"repo_private"`
+ IsSCMPrivate bool `json:"private" xorm:"private"`
- IsTrusted bool `json:"trusted" xorm:"repo_trusted"`
+ IsTrusted bool `json:"trusted" xorm:"trusted"`
- IsGated bool `json:"gated" xorm:"repo_gated"`
+ IsGated bool `json:"gated" xorm:"gated"`
- IsActive bool `json:"active" xorm:"repo_active"`
+ IsActive bool `json:"active" xorm:"active"`
- AllowPull bool `json:"allow_pr" xorm:"repo_allow_pr"`
+ AllowPull bool `json:"allow_pr" xorm:"allow_pr"`
- AllowDeploy bool `json:"allow_deploy" xorm:"repo_allow_deploy"`
+ AllowDeploy bool `json:"allow_deploy" xorm:"allow_deploy"`
- Config string `json:"config_file" xorm:"varchar(500) 'repo_config_path'"`
+ Config string `json:"config_file" xorm:"varchar(500) 'config_path'"`
- Hash string `json:"-" xorm:"varchar(500) 'repo_hash'"`
+ Hash string `json:"-" xorm:"varchar(500) 'hash'"`
Perm *Perm `json:"-" xorm:"-"`
CancelPreviousPipelineEvents []WebhookEvent `json:"cancel_previous_pipeline_events" xorm:"json 'cancel_previous_pipeline_events'"`
NetrcOnlyTrusted bool `json:"netrc_only_trusted" xorm:"NOT NULL DEFAULT true 'netrc_only_trusted'"`
@@ -45,13 +45,13 @@ type SecretStore interface {

// Secret represents a secret variable, such as a password or token.
type Secret struct {
- ID int64 `json:"id" xorm:"pk autoincr 'secret_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- OrgID int64 `json:"org_id" xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'secret_org_id'"`
+ OrgID int64 `json:"org_id" xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'org_id'"`
- RepoID int64 `json:"repo_id" xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'secret_repo_id'"`
+ RepoID int64 `json:"repo_id" xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'repo_id'"`
- Name string `json:"name" xorm:"NOT NULL UNIQUE(s) INDEX 'secret_name'"`
+ Name string `json:"name" xorm:"NOT NULL UNIQUE(s) INDEX 'name'"`
- Value string `json:"value,omitempty" xorm:"TEXT 'secret_value'"`
+ Value string `json:"value,omitempty" xorm:"TEXT 'value'"`
- Images []string `json:"images" xorm:"json 'secret_images'"`
+ Images []string `json:"images" xorm:"json 'images'"`
- Events []WebhookEvent `json:"events" xorm:"json 'secret_events'"`
+ Events []WebhookEvent `json:"events" xorm:"json 'events'"`
} // @name Secret

// TableName return database table name for xorm.
@@ -16,6 +16,11 @@ package model

// ServerConfig represents a key-value pair for storing server configurations.
type ServerConfig struct {
- Key string `json:"key" xorm:"pk"`
+ Key string `json:"key" xorm:"pk 'key'"`
- Value string `json:"value" xorm:""`
+ Value string `json:"value" xorm:"value"`
}

+ // TableName return database table name for xorm.
+ func (ServerConfig) TableName() string {
+ return "server_configs"
+ }
@@ -26,19 +26,19 @@ const (

// Step represents a process in the pipeline.
type Step struct {
- ID int64 `json:"id" xorm:"pk autoincr 'step_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- UUID string `json:"uuid" xorm:"INDEX 'step_uuid'"`
+ UUID string `json:"uuid" xorm:"INDEX 'uuid'"`
- PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"`
+ PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'pipeline_id'"`
- PID int `json:"pid" xorm:"UNIQUE(s) 'step_pid'"`
+ PID int `json:"pid" xorm:"UNIQUE(s) 'pid'"`
- PPID int `json:"ppid" xorm:"step_ppid"`
+ PPID int `json:"ppid" xorm:"ppid"`
- Name string `json:"name" xorm:"step_name"`
+ Name string `json:"name" xorm:"name"`
- State StatusValue `json:"state" xorm:"step_state"`
+ State StatusValue `json:"state" xorm:"state"`
- Error string `json:"error,omitempty" xorm:"TEXT 'step_error'"`
+ Error string `json:"error,omitempty" xorm:"TEXT 'error'"`
- Failure string `json:"-" xorm:"step_failure"`
+ Failure string `json:"-" xorm:"failure"`
- ExitCode int `json:"exit_code" xorm:"step_exit_code"`
+ ExitCode int `json:"exit_code" xorm:"exit_code"`
- Started int64 `json:"start_time,omitempty" xorm:"step_started"`
+ Started int64 `json:"start_time,omitempty" xorm:"started"`
- Stopped int64 `json:"end_time,omitempty" xorm:"step_stopped"`
+ Stopped int64 `json:"end_time,omitempty" xorm:"stopped"`
- Type StepType `json:"type,omitempty" xorm:"step_type"`
+ Type StepType `json:"type,omitempty" xorm:"type"`
} // @name Step

// TableName return database table name for xorm.
@@ -21,12 +21,12 @@ import (

// Task defines scheduled pipeline Task.
type Task struct {
- ID string `json:"id" xorm:"PK UNIQUE 'task_id'"`
+ ID string `json:"id" xorm:"PK UNIQUE 'id'"`
- Data []byte `json:"data" xorm:"LONGBLOB 'task_data'"`
+ Data []byte `json:"data" xorm:"LONGBLOB 'data'"`
- Labels map[string]string `json:"labels" xorm:"json 'task_labels'"`
+ Labels map[string]string `json:"labels" xorm:"json 'labels'"`
- Dependencies []string `json:"dependencies" xorm:"json 'task_dependencies'"`
+ Dependencies []string `json:"dependencies" xorm:"json 'dependencies'"`
- RunOn []string `json:"run_on" xorm:"json 'task_run_on'"`
+ RunOn []string `json:"run_on" xorm:"json 'run_on'"`
- DepStatus map[string]StatusValue `json:"dep_status" xorm:"json 'task_dep_status'"`
+ DepStatus map[string]StatusValue `json:"dep_status" xorm:"json 'dependencies_status'"`
AgentID int64 `json:"agent_id" xorm:"'agent_id'"`
} // @name Task
@@ -32,7 +32,7 @@ type User struct {
// the id for this user.
//
// required: true
- ID int64 `json:"id" xorm:"pk autoincr 'user_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`

ForgeID int64 `json:"forge_id,omitempty" xorm:"forge_id"`

@@ -41,36 +41,36 @@ type User struct {
// Login is the username for this user.
//
// required: true
- Login string `json:"login" xorm:"UNIQUE 'user_login'"`
+ Login string `json:"login" xorm:"UNIQUE 'login'"`

// Token is the oauth2 token.
- Token string `json:"-" xorm:"TEXT 'user_token'"`
+ Token string `json:"-" xorm:"TEXT 'token'"`

// Secret is the oauth2 token secret.
- Secret string `json:"-" xorm:"TEXT 'user_secret'"`
+ Secret string `json:"-" xorm:"TEXT 'secret'"`

// Expiry is the token and secret expiration timestamp.
- Expiry int64 `json:"-" xorm:"user_expiry"`
+ Expiry int64 `json:"-" xorm:"expiry"`

// Email is the email address for this user.
//
// required: true
- Email string `json:"email" xorm:" varchar(500) 'user_email'"`
+ Email string `json:"email" xorm:" varchar(500) 'email'"`

// the avatar url for this user.
- Avatar string `json:"avatar_url" xorm:" varchar(500) 'user_avatar'"`
+ Avatar string `json:"avatar_url" xorm:" varchar(500) 'avatar'"`

// Admin indicates the user is a system administrator.
//
// NOTE: If the username is part of the WOODPECKER_ADMIN
// environment variable, this value will be set to true on login.
- Admin bool `json:"admin,omitempty" xorm:"user_admin"`
+ Admin bool `json:"admin,omitempty" xorm:"admin"`

// Hash is a unique token used to sign tokens.
- Hash string `json:"-" xorm:"UNIQUE varchar(500) 'user_hash'"`
+ Hash string `json:"-" xorm:"UNIQUE varchar(500) 'hash'"`

// OrgID is the of the user as model.Org.
- OrgID int64 `json:"org_id" xorm:"user_org_id"`
+ OrgID int64 `json:"org_id" xorm:"org_id"`
} // @name User

// TableName return database table name for xorm.
@@ -17,18 +17,18 @@ package model

// Workflow represents a workflow in the pipeline.
type Workflow struct {
- ID int64 `json:"id" xorm:"pk autoincr 'workflow_id'"`
+ ID int64 `json:"id" xorm:"pk autoincr 'id'"`
- PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'workflow_pipeline_id'"`
+ PipelineID int64 `json:"pipeline_id" xorm:"UNIQUE(s) INDEX 'pipeline_id'"`
- PID int `json:"pid" xorm:"UNIQUE(s) 'workflow_pid'"`
+ PID int `json:"pid" xorm:"UNIQUE(s) 'pid'"`
- Name string `json:"name" xorm:"workflow_name"`
+ Name string `json:"name" xorm:"name"`
- State StatusValue `json:"state" xorm:"workflow_state"`
+ State StatusValue `json:"state" xorm:"state"`
- Error string `json:"error,omitempty" xorm:"TEXT 'workflow_error'"`
+ Error string `json:"error,omitempty" xorm:"TEXT 'error'"`
- Started int64 `json:"start_time,omitempty" xorm:"workflow_started"`
+ Started int64 `json:"start_time,omitempty" xorm:"started"`
- Stopped int64 `json:"end_time,omitempty" xorm:"workflow_stopped"`
+ Stopped int64 `json:"end_time,omitempty" xorm:"stopped"`
- AgentID int64 `json:"agent_id,omitempty" xorm:"workflow_agent_id"`
+ AgentID int64 `json:"agent_id,omitempty" xorm:"agent_id"`
- Platform string `json:"platform,omitempty" xorm:"workflow_platform"`
+ Platform string `json:"platform,omitempty" xorm:"platform"`
- Environ map[string]string `json:"environ,omitempty" xorm:"json 'workflow_environ'"`
+ Environ map[string]string `json:"environ,omitempty" xorm:"json 'environ'"`
- AxisID int `json:"-" xorm:"workflow_axis_id"`
+ AxisID int `json:"-" xorm:"axis_id"`
Children []*Step `json:"children,omitempty" xorm:"-"`
}
@@ -29,16 +29,16 @@ import (
func (s storage) ConfigsForPipeline(pipelineID int64) ([]*model.Config, error) {
configs := make([]*model.Config, 0, perPage)
return configs, s.engine.
- Table("config").
+ Table("configs").
- Join("LEFT", "pipeline_config", "config.config_id = pipeline_config.config_id").
+ Join("LEFT", "pipeline_configs", "configs.id = pipeline_configs.config_id").
- Where("pipeline_config.pipeline_id = ?", pipelineID).
+ Where("pipeline_configs.pipeline_id = ?", pipelineID).
Find(&configs)
}

func (s storage) configFindIdentical(sess *xorm.Session, repoID int64, hash, name string) (*model.Config, error) {
conf := new(model.Config)
if err := wrapGet(sess.Where(
- builder.Eq{"config_repo_id": repoID, "config_hash": hash, "config_name": name},
+ builder.Eq{"repo_id": repoID, "hash": hash, "name": name},
).Get(conf)); err != nil {
return nil, err
}
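For orientation, the rewritten ConfigsForPipeline builds roughly the following SQL (a sketch of the shape only; exact quoting and placeholders depend on the database dialect):

// Approximate statement assembled by xorm for ConfigsForPipeline (sketch only).
const configsForPipelineSQL = `
SELECT configs.*
FROM configs
LEFT JOIN pipeline_configs ON configs.id = pipeline_configs.config_id
WHERE pipeline_configs.pipeline_id = ?`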
@@ -20,30 +20,30 @@ import (
"go.woodpecker-ci.org/woodpecker/v2/server/model"
)

- var feedItemSelect = `repos.repo_id as feed_repo_id,
+ var feedItemSelect = `repos.id as repo_id,
- pipelines.pipeline_id as feed_pipeline_id,
+ pipelines.id as pipeline_id,
- pipelines.pipeline_number as feed_pipeline_number,
+ pipelines.number as pipeline_number,
- pipelines.pipeline_event as feed_pipeline_event,
+ pipelines.event as pipeline_event,
- pipelines.pipeline_status as feed_pipeline_status,
+ pipelines.status as pipeline_status,
- pipelines.pipeline_created as feed_pipeline_created,
+ pipelines.created as pipeline_created,
- pipelines.pipeline_started as feed_pipeline_started,
+ pipelines.started as pipeline_started,
- pipelines.pipeline_finished as feed_pipeline_finished,
+ pipelines.finished as pipeline_finished,
- pipelines.pipeline_commit as feed_pipeline_commit,
+ 'pipelines.commit' as pipeline_commit,
- pipelines.pipeline_branch as feed_pipeline_branch,
+ pipelines.branch as pipeline_branch,
- pipelines.pipeline_ref as feed_pipeline_ref,
+ pipelines.ref as pipeline_ref,
- pipelines.pipeline_refspec as feed_pipeline_refspec,
+ pipelines.refspec as pipeline_refspec,
- pipelines.pipeline_title as feed_pipeline_title,
+ pipelines.title as pipeline_title,
- pipelines.pipeline_message as feed_pipeline_message,
+ pipelines.message as pipeline_message,
- pipelines.pipeline_author as feed_pipeline_author,
+ pipelines.author as pipeline_author,
- pipelines.pipeline_email as feed_pipeline_email,
+ pipelines.email as pipeline_email,
- pipelines.pipeline_avatar as feed_pipeline_avatar`
+ pipelines.avatar as pipeline_avatar`

func (s storage) GetPipelineQueue() ([]*model.Feed, error) {
feed := make([]*model.Feed, 0, perPage)
err := s.engine.Table("pipelines").
Select(feedItemSelect).
- Join("INNER", "repos", "pipelines.pipeline_repo_id = repos.repo_id").
+ Join("INNER", "repos", "pipelines.repo_id = repos.id").
- In("pipelines.pipeline_status", model.StatusPending, model.StatusRunning).
+ In("pipelines.status", model.StatusPending, model.StatusRunning).
Find(&feed)
return feed, err
}

@@ -52,10 +52,10 @@ func (s storage) UserFeed(user *model.User) ([]*model.Feed, error) {
feed := make([]*model.Feed, 0, perPage)
err := s.engine.Table("repos").
Select(feedItemSelect).
- Join("INNER", "perms", "repos.repo_id = perms.perm_repo_id").
+ Join("INNER", "perms", "repos.id = perms.repo_id").
- Join("INNER", "pipelines", "repos.repo_id = pipelines.pipeline_repo_id").
+ Join("INNER", "pipelines", "repos.id = pipelines.repo_id").
Where(userPushOrAdminCondition(user.ID)).
- Desc("pipelines.pipeline_id").
+ Desc("pipelines.id").
Limit(perPage).
Find(&feed)

@@ -67,16 +67,16 @@ func (s storage) RepoListLatest(user *model.User) ([]*model.Feed, error) {

err := s.engine.Table("repos").
Select(feedItemSelect).
- Join("INNER", "perms", "repos.repo_id = perms.perm_repo_id").
+ Join("INNER", "perms", "repos.id = perms.repo_id").
- Join("LEFT", "pipelines", "pipelines.pipeline_id = "+`(
+ Join("LEFT", "pipelines", "pipelines.id = "+`(
- SELECT pipelines.pipeline_id FROM pipelines
+ SELECT pipelines.id FROM pipelines
- WHERE pipelines.pipeline_repo_id = repos.repo_id
+ WHERE pipelines.repo_id = repos.id
- ORDER BY pipelines.pipeline_id DESC
+ ORDER BY pipelines.id DESC
LIMIT 1
)`).
Where(userPushOrAdminCondition(user.ID)).
- And(builder.Eq{"repos.repo_active": true}).
+ And(builder.Eq{"repos.active": true}).
- Asc("repos.repo_full_name").
+ Asc("repos.full_name").
Find(&feed)

return feed, err
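Likewise, RepoListLatest keeps its correlated-subquery shape and only swaps the column names; roughly it corresponds to SQL of this form (sketch only, omitting the userPushOrAdminCondition filter and the full feedItemSelect column list):

// Approximate shape of the SQL behind RepoListLatest after the rename (sketch only).
const repoListLatestSQL = `
SELECT /* feedItemSelect columns */ *
FROM repos
INNER JOIN perms ON repos.id = perms.repo_id
LEFT JOIN pipelines ON pipelines.id = (
    SELECT pipelines.id FROM pipelines
    WHERE pipelines.repo_id = repos.id
    ORDER BY pipelines.id DESC
    LIMIT 1
)
WHERE repos.active = true /* plus the push-or-admin permission filter */
ORDER BY repos.full_name ASC`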
@@ -43,7 +43,7 @@ func (oldStep021) TableName() string {
var parentStepsToWorkflows = xormigrate.Migration{
ID: "parent-steps-to-workflows",
MigrateSession: func(sess *xorm.Session) error {
- if err := sess.Sync(new(model.Workflow)); err != nil {
+ if err := sess.Sync(new(workflowV031)); err != nil {
return err
}
// make sure the columns exist before removing them

@@ -58,7 +58,7 @@ var parentStepsToWorkflows = xormigrate.Migration{
}

for _, p := range parentSteps {
- asWorkflow := &model.Workflow{
+ asWorkflow := &workflowV031{
PipelineID: p.PipelineID,
PID: p.PID,
Name: p.Name,
@@ -66,7 +66,7 @@ var addOrgs = xormigrate.Migration{
}
}

- if err := sess.Sync(new(model.Org), new(syncRepo022), new(model.User)); err != nil {
+ if err := sess.Sync(new(model.Org), new(syncRepo022), new(userV031)); err != nil {
return fmt.Errorf("sync new models failed: %w", err)
}

@@ -88,7 +88,7 @@ var addOrgs = xormigrate.Migration{

// check if it's a registered user
if _, ok := users[orgName]; !ok {
- exist, err := sess.Where("user_login = ?", orgName).Exist(new(model.User))
+ exist, err := sess.Where("user_login = ?", orgName).Exist(new(userV031))
if err != nil {
return fmt.Errorf("check if user '%s' exist failed: %w", orgName, err)
}
@@ -26,12 +26,12 @@ import (
var addOrgID = xormigrate.Migration{
ID: "add-org-id",
MigrateSession: func(sess *xorm.Session) error {
- if err := sess.Sync(new(model.User)); err != nil {
+ if err := sess.Sync(new(userV031)); err != nil {
return fmt.Errorf("sync new models failed: %w", err)
}

// get all users
- var users []*model.User
+ var users []*userV031
if err := sess.Find(&users); err != nil {
return fmt.Errorf("find all repos failed: %w", err)
}
@@ -23,14 +23,83 @@ import (
"go.woodpecker-ci.org/woodpecker/v2/server/model"
)

+ type userV030 struct {
+ ID int64 `xorm:"pk autoincr 'user_id'"`
+ ForgeID int64 `xorm:"forge_id"`
+ ForgeRemoteID model.ForgeRemoteID `xorm:"forge_remote_id"`
+ Login string `xorm:"UNIQUE 'user_login'"`
+ Token string `xorm:"TEXT 'user_token'"`
+ Secret string `xorm:"TEXT 'user_secret'"`
+ Expiry int64 `xorm:"user_expiry"`
+ Email string `xorm:" varchar(500) 'user_email'"`
+ Avatar string `xorm:" varchar(500) 'user_avatar'"`
+ Admin bool `xorm:"user_admin"`
+ Hash string `xorm:"UNIQUE varchar(500) 'user_hash'"`
+ OrgID int64 `xorm:"user_org_id"`
+ }
+
+ func (userV030) TableName() string {
+ return "users"
+ }
+
+ type repoV030 struct {
+ ID int64 `xorm:"pk autoincr 'repo_id'"`
+ UserID int64 `xorm:"repo_user_id"`
+ ForgeID int64 `xorm:"forge_id"`
+ ForgeRemoteID model.ForgeRemoteID `xorm:"forge_remote_id"`
+ OrgID int64 `xorm:"repo_org_id"`
+ Owner string `xorm:"UNIQUE(name) 'repo_owner'"`
+ Name string `xorm:"UNIQUE(name) 'repo_name'"`
+ FullName string `xorm:"UNIQUE 'repo_full_name'"`
+ Avatar string `xorm:"varchar(500) 'repo_avatar'"`
+ ForgeURL string `xorm:"varchar(1000) 'repo_forge_url'"`
+ Clone string `xorm:"varchar(1000) 'repo_clone'"`
+ CloneSSH string `xorm:"varchar(1000) 'repo_clone_ssh'"`
+ Branch string `xorm:"varchar(500) 'repo_branch'"`
+ SCMKind model.SCMKind `xorm:"varchar(50) 'repo_scm'"`
+ PREnabled bool `xorm:"DEFAULT TRUE 'repo_pr_enabled'"`
+ Timeout int64 `xorm:"repo_timeout"`
+ Visibility model.RepoVisibility `xorm:"varchar(10) 'repo_visibility'"`
+ IsSCMPrivate bool `xorm:"repo_private"`
+ IsTrusted bool `xorm:"repo_trusted"`
+ IsGated bool `xorm:"repo_gated"`
+ IsActive bool `xorm:"repo_active"`
+ AllowPull bool `xorm:"repo_allow_pr"`
+ AllowDeploy bool `xorm:"repo_allow_deploy"`
+ Config string `xorm:"varchar(500) 'repo_config_path'"`
+ Hash string `xorm:"varchar(500) 'repo_hash'"`
+ Perm *model.Perm `xorm:"-"`
+ CancelPreviousPipelineEvents []model.WebhookEvent `xorm:"json 'cancel_previous_pipeline_events'"`
+ NetrcOnlyTrusted bool `xorm:"NOT NULL DEFAULT true 'netrc_only_trusted'"`
+ }
+
+ func (repoV030) TableName() string {
+ return "repos"
+ }
+
+ type forgeV030 struct {
+ ID int64 `xorm:"pk autoincr 'id'"`
+ Type model.ForgeType `xorm:"VARCHAR(250) 'type'"`
+ URL string `xorm:"VARCHAR(500) 'url'"`
+ Client string `xorm:"VARCHAR(250) 'client'"`
+ ClientSecret string `xorm:"VARCHAR(250) 'client_secret'"`
+ SkipVerify bool `xorm:"bool 'skip_verify'"`
+ OAuthHost string `xorm:"VARCHAR(250) 'oauth_host'"` // public url for oauth if different from url
+ AdditionalOptions map[string]any `xorm:"json 'additional_options'"`
+ }
+
+ func (forgeV030) TableName() string {
+ return "forge"
+ }
+
var setForgeID = xormigrate.Migration{
ID: "set-forge-id",
MigrateSession: func(sess *xorm.Session) (err error) {
- if err := sess.Sync(new(model.User), new(model.Repo), new(model.Forge), new(model.Org)); err != nil {
+ if err := sess.Sync(new(userV030), new(repoV030), new(forgeV030), new(model.Org)); err != nil {
return fmt.Errorf("sync new models failed: %w", err)
}

- _, err = sess.Exec(fmt.Sprintf("UPDATE `%s` SET forge_id=1;", model.User{}.TableName()))
+ _, err = sess.Exec(fmt.Sprintf("UPDATE `%s` SET forge_id=1;", userV030{}.TableName()))
if err != nil {
return err
}

@@ -40,7 +109,7 @@ var setForgeID = xormigrate.Migration{
return err
}

- _, err = sess.Exec(fmt.Sprintf("UPDATE `%s` SET forge_id=1;", model.Repo{}.TableName()))
+ _, err = sess.Exec(fmt.Sprintf("UPDATE `%s` SET forge_id=1;", repoV030{}.TableName()))
return err
},
}
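The substitutions above follow a general rule: a migration should reference a frozen copy of the schema as it existed at that point (userV030, repoV030, forgeV030) rather than the live model structs, so later renames cannot silently change what the migration does. A minimal hedged sketch of that pattern with xormigrate (the names here are illustrative, not taken from the diff):

package migration

import (
	"src.techknowlogick.com/xormigrate"
	"xorm.io/xorm"
)

// exampleV031 freezes the pre-rename shape of an illustrative table.
type exampleV031 struct {
	ID   int64  `xorm:"pk autoincr 'example_id'"`
	Name string `xorm:"example_name"`
}

func (exampleV031) TableName() string { return "examples" }

var renameExampleColumns = xormigrate.Migration{
	ID: "rename-example-columns",
	MigrateSession: func(sess *xorm.Session) error {
		// Ensure the old-shaped table exists before altering it.
		return sess.Sync(new(exampleV031))
	},
}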
637
server/store/datastore/migration/031_unify_columns_tables.go
Normal file
637
server/store/datastore/migration/031_unify_columns_tables.go
Normal file
|
@ -0,0 +1,637 @@
|
||||||
|
// Copyright 2024 Woodpecker Authors
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
|
||||||
|
package migration
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
|
||||||
|
"src.techknowlogick.com/xormigrate"
|
||||||
|
"xorm.io/xorm"
|
||||||
|
|
||||||
|
"go.woodpecker-ci.org/woodpecker/v2/pipeline/errors/types"
|
||||||
|
"go.woodpecker-ci.org/woodpecker/v2/server/model"
|
||||||
|
)
|
||||||
|
|
||||||
|
type configV031 struct {
|
||||||
|
ID int64 `xorm:"pk autoincr 'config_id'"`
|
||||||
|
RepoID int64 `xorm:"UNIQUE(s) 'config_repo_id'"`
|
||||||
|
Hash string `xorm:"UNIQUE(s) 'config_hash'"`
|
||||||
|
Name string `xorm:"UNIQUE(s) 'config_name'"`
|
||||||
|
Data []byte `xorm:"LONGBLOB 'config_data'"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (configV031) TableName() string {
|
||||||
|
return "config"
|
||||||
|
}
|
||||||
|
|
||||||
|
type cronV031 struct {
|
||||||
|
ID int64 `xorm:"pk autoincr 'i_d'"`
|
||||||
|
Name string `xorm:"name UNIQUE(s) INDEX"`
|
||||||
|
RepoID int64 `xorm:"repo_id UNIQUE(s) INDEX"`
|
||||||
|
CreatorID int64 `xorm:"creator_id INDEX"`
|
||||||
|
NextExec int64 `xorm:"next_exec"`
|
||||||
|
Schedule string `xorm:"schedule NOT NULL"`
|
||||||
|
Created int64 `xorm:"created NOT NULL DEFAULT 0"`
|
||||||
|
Branch string `xorm:"branch"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (cronV031) TableName() string {
|
||||||
|
return "crons"
|
||||||
|
}
|
||||||
|
|
||||||
|
type permV031 struct {
|
||||||
|
UserID int64 `xorm:"UNIQUE(s) INDEX NOT NULL 'perm_user_id'"`
|
||||||
|
RepoID int64 `xorm:"UNIQUE(s) INDEX NOT NULL 'perm_repo_id'"`
|
||||||
|
Pull bool `xorm:"perm_pull"`
|
||||||
|
Push bool `xorm:"perm_push"`
|
||||||
|
Admin bool `xorm:"perm_admin"`
|
||||||
|
Synced int64 `xorm:"perm_synced"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (permV031) TableName() string {
|
||||||
|
return "perms"
|
||||||
|
}
|
||||||
|
|
||||||
|
type pipelineV031 struct {
|
||||||
|
ID int64 `xorm:"pk autoincr 'pipeline_id'"`
|
||||||
|
RepoID int64 `xorm:"UNIQUE(s) INDEX 'pipeline_repo_id'"`
|
||||||
|
Number int64 `xorm:"UNIQUE(s) 'pipeline_number'"`
|
||||||
|
Author string `xorm:"INDEX 'pipeline_author'"`
|
||||||
|
Parent int64 `xorm:"pipeline_parent"`
|
||||||
|
Event model.WebhookEvent `xorm:"pipeline_event"`
|
||||||
|
Status model.StatusValue `xorm:"INDEX 'pipeline_status'"`
|
||||||
|
Errors []*types.PipelineError `xorm:"json 'pipeline_errors'"`
|
||||||
|
Created int64 `xorm:"pipeline_created"`
|
||||||
|
Started int64 `xorm:"pipeline_started"`
|
||||||
|
Finished int64 `xorm:"pipeline_finished"`
|
||||||
|
Deploy string `xorm:"pipeline_deploy"`
|
||||||
|
DeployTask string `xorm:"pipeline_deploy_task"`
|
||||||
|
Commit string `xorm:"pipeline_commit"`
|
||||||
|
Branch string `xorm:"pipeline_branch"`
|
||||||
|
Ref string `xorm:"pipeline_ref"`
|
||||||
|
Refspec string `xorm:"pipeline_refspec"`
|
||||||
|
Title string `xorm:"pipeline_title"`
|
||||||
|
Message string `xorm:"TEXT 'pipeline_message'"`
|
||||||
|
Timestamp int64 `xorm:"pipeline_timestamp"`
|
||||||
|
Sender string `xorm:"pipeline_sender"` // uses reported user for webhooks and name of cron for cron pipelines
|
||||||
|
Avatar string `xorm:"pipeline_avatar"`
|
||||||
|
Email string `xorm:"pipeline_email"`
|
||||||
|
ForgeURL string `xorm:"pipeline_forge_url"`
|
||||||
|
Reviewer string `xorm:"pipeline_reviewer"`
|
||||||
|
Reviewed int64 `xorm:"pipeline_reviewed"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (pipelineV031) TableName() string {
|
||||||
|
return "pipelines"
|
||||||
|
}
|
||||||
|
|
||||||
|
type redirectionV031 struct {
|
||||||
|
ID int64 `xorm:"pk autoincr 'redirection_id'"`
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r redirectionV031) TableName() string {
|
||||||
|
return "redirections"
|
||||||
|
}
|
||||||
|
|
||||||
|
type registryV031 struct {
|
||||||
|
ID int64 `xorm:"pk autoincr 'registry_id'"`
|
||||||
|
RepoID int64 `xorm:"UNIQUE(s) INDEX 'registry_repo_id'"`
|
||||||
|
Address string `xorm:"UNIQUE(s) INDEX 'registry_addr'"`
|
||||||
|
Username string `xorm:"varchar(2000) 'registry_username'"`
|
||||||
|
Password string `xorm:"TEXT 'registry_password'"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type repoV031 struct {
	ID           int64                `xorm:"pk autoincr 'repo_id'"`
	UserID       int64                `xorm:"repo_user_id"`
	OrgID        int64                `xorm:"repo_org_id"`
	Owner        string               `xorm:"UNIQUE(name) 'repo_owner'"`
	Name         string               `xorm:"UNIQUE(name) 'repo_name'"`
	FullName     string               `xorm:"UNIQUE 'repo_full_name'"`
	Avatar       string               `xorm:"varchar(500) 'repo_avatar'"`
	ForgeURL     string               `xorm:"varchar(1000) 'repo_forge_url'"`
	Clone        string               `xorm:"varchar(1000) 'repo_clone'"`
	CloneSSH     string               `xorm:"varchar(1000) 'repo_clone_ssh'"`
	Branch       string               `xorm:"varchar(500) 'repo_branch'"`
	SCMKind      model.SCMKind        `xorm:"varchar(50) 'repo_scm'"`
	PREnabled    bool                 `xorm:"DEFAULT TRUE 'repo_pr_enabled'"`
	Timeout      int64                `xorm:"repo_timeout"`
	Visibility   model.RepoVisibility `xorm:"varchar(10) 'repo_visibility'"`
	IsSCMPrivate bool                 `xorm:"repo_private"`
	IsTrusted    bool                 `xorm:"repo_trusted"`
	IsGated      bool                 `xorm:"repo_gated"`
	IsActive     bool                 `xorm:"repo_active"`
	AllowPull    bool                 `xorm:"repo_allow_pr"`
	AllowDeploy  bool                 `xorm:"repo_allow_deploy"`
	Config       string               `xorm:"varchar(500) 'repo_config_path'"`
	Hash         string               `xorm:"varchar(500) 'repo_hash'"`
}

func (repoV031) TableName() string {
	return "repos"
}

type secretV031 struct {
	ID     int64                `xorm:"pk autoincr 'secret_id'"`
	OrgID  int64                `xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'secret_org_id'"`
	RepoID int64                `xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'secret_repo_id'"`
	Name   string               `xorm:"NOT NULL UNIQUE(s) INDEX 'secret_name'"`
	Value  string               `xorm:"TEXT 'secret_value'"`
	Images []string             `xorm:"json 'secret_images'"`
	Events []model.WebhookEvent `xorm:"json 'secret_events'"`
}

func (secretV031) TableName() string {
	return "secrets"
}

type stepV031 struct {
	ID         int64             `xorm:"pk autoincr 'step_id'"`
	UUID       string            `xorm:"INDEX 'step_uuid'"`
	PipelineID int64             `xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"`
	PID        int               `xorm:"UNIQUE(s) 'step_pid'"`
	PPID       int               `xorm:"step_ppid"`
	Name       string            `xorm:"step_name"`
	State      model.StatusValue `xorm:"step_state"`
	Error      string            `xorm:"TEXT 'step_error'"`
	Failure    string            `xorm:"step_failure"`
	ExitCode   int               `xorm:"step_exit_code"`
	Started    int64             `xorm:"step_started"`
	Stopped    int64             `xorm:"step_stopped"`
	Type       model.StepType    `xorm:"step_type"`
}

func (stepV031) TableName() string {
	return "steps"
}

type taskV031 struct {
	ID           string                       `xorm:"PK UNIQUE 'task_id'"`
	Data         []byte                       `xorm:"LONGBLOB 'task_data'"`
	Labels       map[string]string            `xorm:"json 'task_labels'"`
	Dependencies []string                     `xorm:"json 'task_dependencies'"`
	RunOn        []string                     `xorm:"json 'task_run_on'"`
	DepStatus    map[string]model.StatusValue `xorm:"json 'task_dep_status'"`
}

func (taskV031) TableName() string {
	return "tasks"
}

type userV031 struct {
	ID     int64  `xorm:"pk autoincr 'user_id'"`
	Login  string `xorm:"UNIQUE 'user_login'"`
	Token  string `xorm:"TEXT 'user_token'"`
	Secret string `xorm:"TEXT 'user_secret'"`
	Expiry int64  `xorm:"user_expiry"`
	Email  string `xorm:" varchar(500) 'user_email'"`
	Avatar string `xorm:" varchar(500) 'user_avatar'"`
	Admin  bool   `xorm:"user_admin"`
	Hash   string `xorm:"UNIQUE varchar(500) 'user_hash'"`
	OrgID  int64  `xorm:"user_org_id"`
}

func (userV031) TableName() string {
	return "users"
}

type workflowV031 struct {
	ID         int64             `xorm:"pk autoincr 'workflow_id'"`
	PipelineID int64             `xorm:"UNIQUE(s) INDEX 'workflow_pipeline_id'"`
	PID        int               `xorm:"UNIQUE(s) 'workflow_pid'"`
	Name       string            `xorm:"workflow_name"`
	State      model.StatusValue `xorm:"workflow_state"`
	Error      string            `xorm:"TEXT 'workflow_error'"`
	Started    int64             `xorm:"workflow_started"`
	Stopped    int64             `xorm:"workflow_stopped"`
	AgentID    int64             `xorm:"workflow_agent_id"`
	Platform   string            `xorm:"workflow_platform"`
	Environ    map[string]string `xorm:"json 'workflow_environ'"`
	AxisID     int               `xorm:"workflow_axis_id"`
}

func (workflowV031) TableName() string {
	return "workflows"
}

type serverConfigV031 struct {
	Key   string `xorm:"pk 'key'"`
	Value string `xorm:"value"`
}

func (serverConfigV031) TableName() string {
	return "server_config"
}

var unifyColumnsTables = xormigrate.Migration{
	ID: "unify-columns-tables",
	MigrateSession: func(sess *xorm.Session) (err error) {
		if err := sess.Sync(new(configV031), new(cronV031), new(permV031), new(pipelineV031), new(redirectionV031), new(registryV031), new(repoV031), new(secretV031), new(stepV031), new(taskV031), new(userV031), new(workflowV031), new(serverConfigV031)); err != nil {
			return fmt.Errorf("sync models failed: %w", err)
		}

		// Config
		if err := renameColumn(sess, "config", "config_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "config", "config_repo_id", "repo_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "config", "config_hash", "hash"); err != nil {
			return err
		}
		if err := renameColumn(sess, "config", "config_name", "name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "config", "config_data", "data"); err != nil {
			return err
		}
		if err := renameTable(sess, "config", "configs"); err != nil {
			return err
		}

		// PipelineConfig
		if err := renameTable(sess, "pipeline_config", "pipeline_configs"); err != nil {
			return err
		}

		// Cron
		if err := renameColumn(sess, "crons", "i_d", "id"); err != nil {
			return err
		}

		// Forge
		if err := renameTable(sess, "forge", "forges"); err != nil {
			return err
		}

		// Perm
		if err := renameColumn(sess, "perms", "perm_user_id", "user_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "perms", "perm_repo_id", "repo_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "perms", "perm_pull", "pull"); err != nil {
			return err
		}
		if err := renameColumn(sess, "perms", "perm_push", "push"); err != nil {
			return err
		}
		if err := renameColumn(sess, "perms", "perm_admin", "admin"); err != nil {
			return err
		}
		if err := renameColumn(sess, "perms", "perm_synced", "synced"); err != nil {
			return err
		}

		// Pipeline
		if err := renameColumn(sess, "pipelines", "pipeline_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_repo_id", "repo_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_number", "number"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_author", "author"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_parent", "parent"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_event", "event"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_status", "status"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_errors", "errors"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_created", "created"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_started", "started"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_finished", "finished"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_deploy", "deploy"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_deploy_task", "deploy_task"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_commit", "commit"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_branch", "branch"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_ref", "ref"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_refspec", "refspec"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_title", "title"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_message", "message"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_timestamp", "timestamp"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_sender", "sender"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_avatar", "avatar"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_email", "email"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_forge_url", "forge_url"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_reviewer", "reviewer"); err != nil {
			return err
		}
		if err := renameColumn(sess, "pipelines", "pipeline_reviewed", "reviewed"); err != nil {
			return err
		}

		// Redirection
		if err := renameColumn(sess, "redirections", "redirection_id", "id"); err != nil {
			return err
		}

		// Registry
		if err := renameColumn(sess, "registry", "registry_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "registry", "registry_repo_id", "repo_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "registry", "registry_addr", "address"); err != nil {
			return err
		}
		if err := renameColumn(sess, "registry", "registry_username", "username"); err != nil {
			return err
		}
		if err := renameColumn(sess, "registry", "registry_password", "password"); err != nil {
			return err
		}
		if err := renameTable(sess, "registry", "registries"); err != nil {
			return err
		}

		// Repo
		if err := renameColumn(sess, "repos", "repo_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_user_id", "user_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_org_id", "org_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_owner", "owner"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_name", "name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_full_name", "full_name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_avatar", "avatar"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_forge_url", "forge_url"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_clone", "clone"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_clone_ssh", "clone_ssh"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_branch", "branch"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_scm", "scm"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_pr_enabled", "pr_enabled"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_timeout", "timeout"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_visibility", "visibility"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_private", "private"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_trusted", "trusted"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_gated", "gated"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_active", "active"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_allow_pr", "allow_pr"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_allow_deploy", "allow_deploy"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_config_path", "config_path"); err != nil {
			return err
		}
		if err := renameColumn(sess, "repos", "repo_hash", "hash"); err != nil {
			return err
		}

		// Secrets
		if err := renameColumn(sess, "secrets", "secret_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_org_id", "org_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_repo_id", "repo_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_name", "name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_value", "value"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_images", "images"); err != nil {
			return err
		}
		if err := renameColumn(sess, "secrets", "secret_events", "events"); err != nil {
			return err
		}

		// ServerConfig
		if err := renameTable(sess, "server_config", "server_configs"); err != nil {
			return err
		}

		// Step
		if err := renameColumn(sess, "steps", "step_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_uuid", "uuid"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_pipeline_id", "pipeline_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_pid", "pid"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_ppid", "ppid"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_name", "name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_state", "state"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_error", "error"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_failure", "failure"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_exit_code", "exit_code"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_started", "started"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_stopped", "stopped"); err != nil {
			return err
		}
		if err := renameColumn(sess, "steps", "step_type", "type"); err != nil {
			return err
		}

		// Task
		if err := renameColumn(sess, "tasks", "task_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "tasks", "task_data", "data"); err != nil {
			return err
		}
		if err := renameColumn(sess, "tasks", "task_labels", "labels"); err != nil {
			return err
		}
		if err := renameColumn(sess, "tasks", "task_dependencies", "dependencies"); err != nil {
			return err
		}
		if err := renameColumn(sess, "tasks", "task_run_on", "run_on"); err != nil {
			return err
		}
		if err := renameColumn(sess, "tasks", "task_dep_status", "dependencies_status"); err != nil {
			return err
		}

		// User
		if err := renameColumn(sess, "users", "user_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_login", "login"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_token", "token"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_secret", "secret"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_expiry", "expiry"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_email", "email"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_avatar", "avatar"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_admin", "admin"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_hash", "hash"); err != nil {
			return err
		}
		if err := renameColumn(sess, "users", "user_org_id", "org_id"); err != nil {
			return err
		}

		// Workflow
		if err := renameColumn(sess, "workflows", "workflow_id", "id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_pipeline_id", "pipeline_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_pid", "pid"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_name", "name"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_state", "state"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_error", "error"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_started", "started"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_stopped", "stopped"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_agent_id", "agent_id"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_platform", "platform"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_environ", "environ"); err != nil {
			return err
		}
		if err := renameColumn(sess, "workflows", "workflow_axis_id", "axis_id"); err != nil {
			return err
		}

		return nil
	},
}

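The migration above leans entirely on the renameColumn and renameTable helpers, which live elsewhere in the migration package and are not part of this excerpt. Purely for orientation, a naive single-dialect version of such helpers could look like the sketch below; the real implementations have to handle SQLite, MySQL, and Postgres differences, so treat every identifier here as an assumption rather than the project's code.

package migration

import (
	"fmt"

	"xorm.io/xorm"
)

// renameColumnSketch is a hypothetical stand-in for the real helper used above.
// It simply issues an ALTER TABLE statement through the open xorm session.
func renameColumnSketch(sess *xorm.Session, table, oldName, newName string) error {
	_, err := sess.Exec(fmt.Sprintf("ALTER TABLE %s RENAME COLUMN %s TO %s;", table, oldName, newName))
	return err
}

// renameTableSketch is the table-level counterpart, again illustrative only.
func renameTableSketch(sess *xorm.Session, oldName, newName string) error {
	_, err := sess.Exec(fmt.Sprintf("ALTER TABLE %s RENAME TO %s;", oldName, newName))
	return err
}
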
@@ -60,6 +60,7 @@ var migrationTasks = []*xormigrate.Migration{
 	&renameLinkToURL,
 	&cleanRegistryPipeline,
 	&setForgeID,
+	&unifyColumnsTables,
 }
 
 var allBeans = []any{

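With the migration registered, the storage layer below switches every query from the prefixed column names to the unified ones. As a quick orientation only, a repository-scoped secret lookup against the new schema reads roughly like this sketch (the function name and surrounding wiring are assumptions, not part of the change set):

package datastore

import (
	"xorm.io/builder"
	"xorm.io/xorm"

	"go.woodpecker-ci.org/woodpecker/v2/server/model"
)

// findRepoSecretSketch illustrates a lookup using the unified column names
// ("repo_id", "name") introduced by the migration above.
func findRepoSecretSketch(sess *xorm.Session, repoID int64, name string) (*model.Secret, error) {
	secret := new(model.Secret)
	// Get returns (found bool, err error); the found flag is ignored here for brevity.
	_, err := sess.Where(builder.Eq{"repo_id": repoID, "name": name}).Get(secret)
	return secret, err
}
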
@@ -56,12 +56,12 @@ func (s storage) OrgDelete(id int64) error {
 }
 
 func (s storage) orgDelete(sess *xorm.Session, id int64) error {
-	if _, err := sess.Where("secret_org_id = ?", id).Delete(new(model.Secret)); err != nil {
+	if _, err := sess.Where("org_id = ?", id).Delete(new(model.Secret)); err != nil {
 		return err
 	}
 
 	var repos []*model.Repo
-	if err := sess.Where("repo_org_id = ?", id).Find(&repos); err != nil {
+	if err := sess.Where("org_id = ?", id).Find(&repos); err != nil {
 		return err
 	}
 
@@ -84,7 +84,7 @@ func (s storage) OrgFindByName(name string) (*model.Org, error) {
 
 func (s storage) OrgRepoList(org *model.Org, p *model.ListOptions) ([]*model.Repo, error) {
 	var repos []*model.Repo
-	return repos, s.paginate(p).OrderBy("repo_id").Where("repo_org_id = ?", org.ID).Find(&repos)
+	return repos, s.paginate(p).OrderBy("id").Where("org_id = ?", org.ID).Find(&repos)
 }
 
 func (s storage) OrgList(p *model.ListOptions) ([]*model.Org, error) {

@@ -26,7 +26,7 @@ import (
 func (s storage) PermFind(user *model.User, repo *model.Repo) (*model.Perm, error) {
 	perm := new(model.Perm)
 	return perm, wrapGet(s.engine.
-		Where(builder.Eq{"perm_user_id": user.ID, "perm_repo_id": repo.ID}).
+		Where(builder.Eq{"user_id": user.ID, "repo_id": repo.ID}).
 		Get(perm))
 }
 
@@ -75,11 +75,11 @@ func (s storage) permUpsert(sess *xorm.Session, perm *model.Perm) error {
 // userPushOrAdminCondition return condition where user must have push or admin rights
 // if used make sure to have permission table ("perms") joined.
 func userPushOrAdminCondition(userID int64) builder.Cond {
-	return builder.Eq{"perms.perm_user_id": userID}.
-		And(builder.Eq{"perms.perm_push": true}.
-			Or(builder.Eq{"perms.perm_admin": true}))
+	return builder.Eq{"perms.user_id": userID}.
+		And(builder.Eq{"perms.push": true}.
+			Or(builder.Eq{"perms.admin": true}))
 }
 
 func userIDAndRepoIDCond(perm *model.Perm) builder.Cond {
-	return builder.Eq{"perm_user_id": perm.UserID, "perm_repo_id": perm.RepoID}
+	return builder.Eq{"user_id": perm.UserID, "repo_id": perm.RepoID}
 }

@@ -31,44 +31,44 @@ func (s storage) GetPipeline(id int64) (*model.Pipeline, error) {
 func (s storage) GetPipelineNumber(repo *model.Repo, num int64) (*model.Pipeline, error) {
 	pipeline := new(model.Pipeline)
 	return pipeline, wrapGet(s.engine.Where(
-		builder.Eq{"pipeline_repo_id": repo.ID, "pipeline_number": num},
+		builder.Eq{"repo_id": repo.ID, "number": num},
 	).Get(pipeline))
 }
 
 func (s storage) GetPipelineLast(repo *model.Repo, branch string) (*model.Pipeline, error) {
 	pipeline := new(model.Pipeline)
 	return pipeline, wrapGet(s.engine.
-		Desc("pipeline_number").
-		Where(builder.Eq{"pipeline_repo_id": repo.ID, "pipeline_branch": branch, "pipeline_event": model.EventPush}).
+		Desc("number").
+		Where(builder.Eq{"repo_id": repo.ID, "branch": branch, "event": model.EventPush}).
 		Get(pipeline))
 }
 
 func (s storage) GetPipelineLastBefore(repo *model.Repo, branch string, num int64) (*model.Pipeline, error) {
 	pipeline := new(model.Pipeline)
 	return pipeline, wrapGet(s.engine.
-		Desc("pipeline_number").
-		Where(builder.Lt{"pipeline_id": num}.
-			And(builder.Eq{"pipeline_repo_id": repo.ID, "pipeline_branch": branch})).
+		Desc("number").
+		Where(builder.Lt{"id": num}.
+			And(builder.Eq{"repo_id": repo.ID, "branch": branch})).
 		Get(pipeline))
 }
 
 func (s storage) GetPipelineList(repo *model.Repo, p *model.ListOptions, f *model.PipelineFilter) ([]*model.Pipeline, error) {
 	pipelines := make([]*model.Pipeline, 0, 16)
 
-	cond := builder.NewCond().And(builder.Eq{"pipeline_repo_id": repo.ID})
+	cond := builder.NewCond().And(builder.Eq{"repo_id": repo.ID})
 
 	if f != nil {
 		if f.After != 0 {
-			cond = cond.And(builder.Gt{"pipeline_created": f.After})
+			cond = cond.And(builder.Gt{"created": f.After})
 		}
 
 		if f.Before != 0 {
-			cond = cond.And(builder.Lt{"pipeline_created": f.Before})
+			cond = cond.And(builder.Lt{"created": f.Before})
 		}
 	}
 
 	return pipelines, s.paginate(p).Where(cond).
-		Desc("pipeline_number").
+		Desc("number").
 		Find(&pipelines)
 }
 
@@ -76,9 +76,9 @@ func (s storage) GetPipelineList(repo *model.Repo, p *model.ListOptions, f *mode
 func (s storage) GetActivePipelineList(repo *model.Repo) ([]*model.Pipeline, error) {
 	pipelines := make([]*model.Pipeline, 0)
 	query := s.engine.
-		Where("pipeline_repo_id = ?", repo.ID).
-		In("pipeline_status", model.StatusPending, model.StatusRunning, model.StatusBlocked).
-		Desc("pipeline_number")
+		Where("repo_id = ?", repo.ID).
+		In("status", model.StatusPending, model.StatusRunning, model.StatusBlocked).
+		Desc("number")
 	return pipelines, query.Find(&pipelines)
 }
 
@@ -93,7 +93,7 @@ func (s storage) CreatePipeline(pipeline *model.Pipeline, stepList ...*model.Ste
 		return err
 	}
 
-	repoExist, err := sess.Where("repo_id = ?", pipeline.RepoID).Exist(&model.Repo{})
+	repoExist, err := sess.Where("id = ?", pipeline.RepoID).Exist(&model.Repo{})
 	if err != nil {
 		return err
 	}
@@ -104,9 +104,9 @@ func (s storage) CreatePipeline(pipeline *model.Pipeline, stepList ...*model.Ste
 
 	// calc pipeline number
 	var number int64
-	if _, err := sess.Select("MAX(pipeline_number)").
+	if _, err := sess.Select("MAX(number)").
 		Table(new(model.Pipeline)).
-		Where("pipeline_repo_id = ?", pipeline.RepoID).
+		Where("repo_id = ?", pipeline.RepoID).
 		Get(&number); err != nil {
 		return err
 	}
@@ -154,7 +154,7 @@ func (s storage) deletePipeline(sess *xorm.Session, pipelineID int64) error {
 	}
 	if !exist {
 		// this config is only used for this pipeline. so delete it
-		if _, err := sess.Where(builder.Eq{"config_id": confID}).Delete(new(model.Config)); err != nil {
+		if _, err := sess.Where(builder.Eq{"id": confID}).Delete(new(model.Config)); err != nil {
 			return err
 		}
 	}

@@ -23,13 +23,13 @@ import (
 func (s storage) RegistryFind(repo *model.Repo, addr string) (*model.Registry, error) {
 	reg := new(model.Registry)
 	return reg, wrapGet(s.engine.Where(
-		builder.Eq{"registry_repo_id": repo.ID, "registry_addr": addr},
+		builder.Eq{"repo_id": repo.ID, "address": addr},
 	).Get(reg))
 }
 
 func (s storage) RegistryList(repo *model.Repo, p *model.ListOptions) ([]*model.Registry, error) {
 	var regs []*model.Registry
-	return regs, s.paginate(p).OrderBy("registry_id").Where("registry_repo_id = ?", repo.ID).Find(&regs)
+	return regs, s.paginate(p).OrderBy("id").Where("repo_id = ?", repo.ID).Find(&regs)
 }
 
 func (s storage) RegistryCreate(registry *model.Registry) error {

@@ -73,11 +73,11 @@ func (s storage) GetRepoName(fullName string) (*model.Repo, error) {
 
 func (s storage) getRepoName(e *xorm.Session, fullName string) (*model.Repo, error) {
 	repo := new(model.Repo)
-	return repo, wrapGet(e.Where("LOWER(repo_full_name) = ?", strings.ToLower(fullName)).Get(repo))
+	return repo, wrapGet(e.Where("LOWER(full_name) = ?", strings.ToLower(fullName)).Get(repo))
 }
 
 func (s storage) GetRepoCount() (int64, error) {
-	return s.engine.Where(builder.Eq{"repo_active": true}).Count(new(model.Repo))
+	return s.engine.Where(builder.Eq{"active": true}).Count(new(model.Repo))
 }
 
 func (s storage) CreateRepo(repo *model.Repo) error {
@@ -105,16 +105,16 @@ func (s storage) DeleteRepo(repo *model.Repo) error {
 
 func (s storage) deleteRepo(sess *xorm.Session, repo *model.Repo) error {
 	const batchSize = perPage
-	if _, err := sess.Where("config_repo_id = ?", repo.ID).Delete(new(model.Config)); err != nil {
+	if _, err := sess.Where("repo_id = ?", repo.ID).Delete(new(model.Config)); err != nil {
 		return err
 	}
-	if _, err := sess.Where("perm_repo_id = ?", repo.ID).Delete(new(model.Perm)); err != nil {
+	if _, err := sess.Where("repo_id = ?", repo.ID).Delete(new(model.Perm)); err != nil {
 		return err
 	}
-	if _, err := sess.Where("registry_repo_id = ?", repo.ID).Delete(new(model.Registry)); err != nil {
+	if _, err := sess.Where("repo_id = ?", repo.ID).Delete(new(model.Registry)); err != nil {
 		return err
 	}
-	if _, err := sess.Where("secret_repo_id = ?", repo.ID).Delete(new(model.Secret)); err != nil {
+	if _, err := sess.Where("repo_id = ?", repo.ID).Delete(new(model.Secret)); err != nil {
 		return err
 	}
 	if _, err := sess.Where("repo_id = ?", repo.ID).Delete(new(model.Redirection)); err != nil {
@@ -124,7 +124,7 @@ func (s storage) deleteRepo(sess *xorm.Session, repo *model.Repo) error {
 	// delete related pipelines
 	for startPipelines := 0; ; startPipelines += batchSize {
 		pipelineIDs := make([]int64, 0, batchSize)
-		if err := sess.Limit(batchSize, startPipelines).Table("pipelines").Cols("pipeline_id").Where("pipeline_repo_id = ?", repo.ID).Find(&pipelineIDs); err != nil {
+		if err := sess.Limit(batchSize, startPipelines).Table("pipelines").Cols("id").Where("repo_id = ?", repo.ID).Find(&pipelineIDs); err != nil {
 			return err
 		}
 		if len(pipelineIDs) == 0 {
@@ -146,16 +146,16 @@ func (s storage) deleteRepo(sess *xorm.Session, repo *model.Repo) error {
 func (s storage) RepoList(user *model.User, owned, active bool) ([]*model.Repo, error) {
 	repos := make([]*model.Repo, 0)
 	sess := s.engine.Table("repos").
-		Join("INNER", "perms", "perms.perm_repo_id = repos.repo_id").
-		Where("perms.perm_user_id = ?", user.ID)
+		Join("INNER", "perms", "perms.repo_id = repos.id").
+		Where("perms.user_id = ?", user.ID)
 	if owned {
-		sess = sess.And(builder.Eq{"perms.perm_push": true}.Or(builder.Eq{"perms.perm_admin": true}))
+		sess = sess.And(builder.Eq{"perms.push": true}.Or(builder.Eq{"perms.admin": true}))
 	}
 	if active {
-		sess = sess.And(builder.Eq{"repos.repo_active": true})
+		sess = sess.And(builder.Eq{"repos.active": true})
 	}
 	return repos, sess.
-		Asc("repo_full_name").
+		Asc("full_name").
 		Find(&repos)
 }
 
@@ -164,9 +164,9 @@ func (s storage) RepoListAll(active bool, p *model.ListOptions) ([]*model.Repo,
 	repos := make([]*model.Repo, 0)
 	sess := s.paginate(p).Table("repos")
 	if active {
-		sess = sess.And(builder.Eq{"repos.repo_active": true})
+		sess = sess.And(builder.Eq{"repos.active": true})
 	}
 	return repos, sess.
-		Asc("repo_full_name").
+		Asc("full_name").
 		Find(&repos)
 }

@@ -20,21 +20,21 @@ import (
 	"go.woodpecker-ci.org/woodpecker/v2/server/model"
 )
 
-const orderSecretsBy = "secret_name"
+const orderSecretsBy = "name"
 
 func (s storage) SecretFind(repo *model.Repo, name string) (*model.Secret, error) {
 	secret := new(model.Secret)
 	return secret, wrapGet(s.engine.Where(
-		builder.Eq{"secret_repo_id": repo.ID, "secret_name": name},
+		builder.Eq{"repo_id": repo.ID, "name": name},
 	).Get(secret))
 }
 
 func (s storage) SecretList(repo *model.Repo, includeGlobalAndOrgSecrets bool, p *model.ListOptions) ([]*model.Secret, error) {
 	var secrets []*model.Secret
-	var cond builder.Cond = builder.Eq{"secret_repo_id": repo.ID}
+	var cond builder.Cond = builder.Eq{"repo_id": repo.ID}
 	if includeGlobalAndOrgSecrets {
-		cond = cond.Or(builder.Eq{"secret_org_id": repo.OrgID}).
-			Or(builder.And(builder.Eq{"secret_org_id": 0}, builder.Eq{"secret_repo_id": 0}))
+		cond = cond.Or(builder.Eq{"org_id": repo.OrgID}).
+			Or(builder.And(builder.Eq{"org_id": 0}, builder.Eq{"repo_id": 0}))
 	}
 	return secrets, s.paginate(p).Where(cond).OrderBy(orderSecretsBy).Find(&secrets)
 }
@@ -62,25 +62,25 @@ func (s storage) SecretDelete(secret *model.Secret) error {
 func (s storage) OrgSecretFind(orgID int64, name string) (*model.Secret, error) {
 	secret := new(model.Secret)
 	return secret, wrapGet(s.engine.Where(
-		builder.Eq{"secret_org_id": orgID, "secret_name": name},
+		builder.Eq{"org_id": orgID, "name": name},
 	).Get(secret))
 }
 
 func (s storage) OrgSecretList(orgID int64, p *model.ListOptions) ([]*model.Secret, error) {
 	secrets := make([]*model.Secret, 0)
-	return secrets, s.paginate(p).Where("secret_org_id = ?", orgID).OrderBy(orderSecretsBy).Find(&secrets)
+	return secrets, s.paginate(p).Where("org_id = ?", orgID).OrderBy(orderSecretsBy).Find(&secrets)
 }
 
 func (s storage) GlobalSecretFind(name string) (*model.Secret, error) {
 	secret := new(model.Secret)
 	return secret, wrapGet(s.engine.Where(
-		builder.Eq{"secret_org_id": 0, "secret_repo_id": 0, "secret_name": name},
+		builder.Eq{"org_id": 0, "repo_id": 0, "name": name},
 	).Get(secret))
 }
 
 func (s storage) GlobalSecretList(p *model.ListOptions) ([]*model.Secret, error) {
 	secrets := make([]*model.Secret, 0)
 	return secrets, s.paginate(p).Where(
-		builder.Eq{"secret_org_id": 0, "secret_repo_id": 0},
+		builder.Eq{"org_id": 0, "repo_id": 0},
 	).OrderBy(orderSecretsBy).Find(&secrets)
 }

@@ -29,29 +29,29 @@ func (s storage) StepLoad(id int64) (*model.Step, error) {
 func (s storage) StepFind(pipeline *model.Pipeline, pid int) (*model.Step, error) {
 	step := new(model.Step)
 	return step, wrapGet(s.engine.Where(
-		builder.Eq{"step_pipeline_id": pipeline.ID, "step_pid": pid},
+		builder.Eq{"pipeline_id": pipeline.ID, "pid": pid},
 	).Get(step))
 }
 
 func (s storage) StepByUUID(uuid string) (*model.Step, error) {
 	step := new(model.Step)
 	return step, wrapGet(s.engine.Where(
-		builder.Eq{"step_uuid": uuid},
+		builder.Eq{"uuid": uuid},
 	).Get(step))
 }
 
 func (s storage) StepChild(pipeline *model.Pipeline, ppid int, child string) (*model.Step, error) {
 	step := new(model.Step)
 	return step, wrapGet(s.engine.Where(
-		builder.Eq{"step_pipeline_id": pipeline.ID, "step_ppid": ppid, "step_name": child},
+		builder.Eq{"pipeline_id": pipeline.ID, "ppid": ppid, "name": child},
 	).Get(step))
 }
 
 func (s storage) StepList(pipeline *model.Pipeline) ([]*model.Step, error) {
 	stepList := make([]*model.Step, 0)
 	return stepList, s.engine.
-		Where("step_pipeline_id = ?", pipeline.ID).
-		OrderBy("step_pid").
+		Where("pipeline_id = ?", pipeline.ID).
+		OrderBy("pid").
 		Find(&stepList)
 }
 
@@ -62,9 +62,9 @@ func (s storage) StepListFromWorkflowFind(workflow *model.Workflow) ([]*model.St
 func (s storage) stepListWorkflow(sess *xorm.Session, workflow *model.Workflow) ([]*model.Step, error) {
 	stepList := make([]*model.Step, 0)
 	return stepList, sess.
-		Where("step_pipeline_id = ?", workflow.PipelineID).
-		Where("step_ppid = ?", workflow.PID).
-		OrderBy("step_pid").
+		Where("pipeline_id = ?", workflow.PipelineID).
+		Where("ppid = ?", workflow.PID).
+		OrderBy("pid").
 		Find(&stepList)
 }
 
@@ -84,7 +84,7 @@ func (s storage) StepUpdate(step *model.Step) error {
 }
 
 func deleteStep(sess *xorm.Session, stepID int64) error {
-	if _, err := sess.Where("step_id = ?", stepID).Delete(new(model.LogEntry)); err != nil {
+	if _, err := sess.Where("id = ?", stepID).Delete(new(model.LogEntry)); err != nil {
 		return err
 	}
 	return wrapDelete(sess.ID(stepID).Delete(new(model.Step)))

@@ -30,5 +30,5 @@ func (s storage) TaskInsert(task *model.Task) error {
 }
 
 func (s storage) TaskDelete(id string) error {
-	return wrapDelete(s.engine.Where("task_id = ?", id).Delete(new(model.Task)))
+	return wrapDelete(s.engine.Where("id = ?", id).Delete(new(model.Task)))
 }

@@ -41,12 +41,12 @@ func (s storage) GetUserLogin(login string) (*model.User, error) {
 
 func (s storage) getUserLogin(sess *xorm.Session, login string) (*model.User, error) {
 	user := new(model.User)
-	return user, wrapGet(sess.Where("user_login=?", login).Get(user))
+	return user, wrapGet(sess.Where("login=?", login).Get(user))
 }
 
 func (s storage) GetUserList(p *model.ListOptions) ([]*model.User, error) {
 	var users []*model.User
-	return users, s.paginate(p).OrderBy("user_id").Find(&users)
+	return users, s.paginate(p).OrderBy("id").Find(&users)
 }
 
 func (s storage) GetUserCount() (int64, error) {
@@ -89,7 +89,7 @@ func (s storage) DeleteUser(user *model.User) error {
 		return err
 	}
 
-	if _, err := sess.Where("perm_user_id = ?", user.ID).Delete(new(model.Perm)); err != nil {
+	if _, err := sess.Where("user_id = ?", user.ID).Delete(new(model.Perm)); err != nil {
 		return err
 	}
 
@@ -87,7 +87,7 @@ func (s storage) workflowsDelete(sess *xorm.Session, pipelineID int64) error {
 	// delete related steps
 	for startSteps := 0; ; startSteps += perPage {
 		stepIDs := make([]int64, 0, perPage)
-		if err := sess.Limit(perPage, startSteps).Table("steps").Cols("step_id").Where("step_pipeline_id = ?", pipelineID).Find(&stepIDs); err != nil {
+		if err := sess.Limit(perPage, startSteps).Table("steps").Cols("id").Where("pipeline_id = ?", pipelineID).Find(&stepIDs); err != nil {
 			return err
 		}
 		if len(stepIDs) == 0 {
@@ -101,7 +101,7 @@ func (s storage) workflowsDelete(sess *xorm.Session, pipelineID int64) error {
 		}
 	}
 
-	_, err := sess.Where("workflow_pipeline_id = ?", pipelineID).Delete(new(model.Workflow))
+	_, err := sess.Where("pipeline_id = ?", pipelineID).Delete(new(model.Workflow))
 	return err
 }
 
@@ -112,8 +112,8 @@ func (s storage) WorkflowList(pipeline *model.Pipeline) ([]*model.Workflow, erro
 // workflowList lists workflows without child steps.
 func (s storage) workflowList(sess *xorm.Session, pipeline *model.Pipeline) ([]*model.Workflow, error) {
 	var wfList []*model.Workflow
-	err := sess.Where("workflow_pipeline_id = ?", pipeline.ID).
-		OrderBy("workflow_pid").
+	err := sess.Where("pipeline_id = ?", pipeline.ID).
+		OrderBy("pid").
 		Find(&wfList)
 	if err != nil {
 		return nil, err