Mirror of https://github.com/woodpecker-ci/woodpecker.git (synced 2024-12-20 15:36:30 +00:00)
parent 8dd74acdee, commit 7bacbd5699
40 changed files with 305 additions and 347 deletions
@@ -52,8 +52,6 @@ import (
 )

 func setupStore(c *cli.Context) (store.Store, error) {
-	// TODO: find a better way than global var to pass down to allow long migrations
-	server.Config.Server.Migrations.AllowLong = c.Bool("migrations-allow-long")
 	datasource := c.String("datasource")
 	driver := c.String("driver")
 	xorm := store.XORM{
@@ -90,7 +88,7 @@ func setupStore(c *cli.Context) (store.Store, error) {
 		log.Fatal().Err(err).Msg("could not open datastore")
 	}

-	if err := store.Migrate(); err != nil {
+	if err := store.Migrate(c.Bool("migrations-allow-long")); err != nil {
 		log.Fatal().Err(err).Msg("could not migrate datastore")
 	}
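Note on the hunks above: the long-migration switch is no longer pushed through the global server.Config; the CLI flag value is handed straight to the store. A minimal sketch of the resulting call pattern, using only names visible in the hunks above (any surrounding wiring is illustrative, not taken from this commit):

	// sketch: `store` is the store.Store opened earlier in setupStore,
	// `c` is the *cli.Context passed to it; both appear in the hunks above.
	allowLong := c.Bool("migrations-allow-long")
	if err := store.Migrate(allowLong); err != nil {
		log.Fatal().Err(err).Msg("could not migrate datastore")
	}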
go.mod (1 changed line)
@@ -61,6 +61,7 @@ require (
 	k8s.io/api v0.28.4
 	k8s.io/apimachinery v0.28.4
 	k8s.io/client-go v0.28.4
+	src.techknowlogick.com/xormigrate v1.7.1
 	xorm.io/builder v0.3.13
 	xorm.io/xorm v1.3.4
 )
go.sum (7 changed lines)
@@ -62,6 +62,7 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
 github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
 github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0=
 github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE=
 github.com/denisenkom/go-mssqldb v0.12.3 h1:pBSGx9Tq67pBOTLmxNuirNTeB8Vjmf886Kx+8Y+8shw=
 github.com/denisenkom/go-mssqldb v0.12.3/go.mod h1:k0mtMFOnU+AihqFxPMiF05rtiDrorD1Vrm1KEz5hxDo=
 github.com/distribution/reference v0.5.0 h1:/FUIFXtfc/x2gpa5/VGfiGLuOIdYa1t65IKK2OFGvA0=
 github.com/distribution/reference v0.5.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
@@ -145,7 +146,9 @@ github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q=
 github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q=
 github.com/golang-jwt/jwt/v5 v5.1.0 h1:UGKbA/IPjtS6zLcdB7i5TyACMgSbOTiR8qzXgw8HWQU=
 github.com/golang-jwt/jwt/v5 v5.1.0/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk=
 github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe h1:lXe2qZdvpiX5WZkZR4hgp4KJVfY3nMkvmwbVkpv1rVY=
 github.com/golang-sql/civil v0.0.0-20190719163853-cb61b32ac6fe/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
 github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
 github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
 github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
 github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
@@ -236,6 +239,7 @@ github.com/jackc/puddle v1.1.3/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dv
 github.com/jackc/puddle v1.3.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
 github.com/jellydator/ttlcache/v3 v3.1.0 h1:0gPFG0IHHP6xyUyXq+JaD8fwkDCqgqwohXNJBcYE71g=
 github.com/jellydator/ttlcache/v3 v3.1.0/go.mod h1:hi7MGFdMAwZna5n2tuvh63DvFLzVKySzCVW6+0gA2n4=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
 github.com/joho/godotenv v1.5.1/go.mod h1:f4LDr5Voq0i2e/R5DDNOoa2zzDfwtkZa6DnEwAbqwq4=
 github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
@@ -671,8 +675,11 @@ sigs.k8s.io/structured-merge-diff/v4 v4.2.3 h1:PRbqxJClWWYMNV1dhaG4NsibJbArud9kF
 sigs.k8s.io/structured-merge-diff/v4 v4.2.3/go.mod h1:qjx8mGObPmV2aSZepjQjbmb2ihdVs8cGKBraizNC69E=
 sigs.k8s.io/yaml v1.3.0 h1:a2VclLzOGrwOHDiV8EfBGhvjHvP46CtW5j6POvhYGGo=
 sigs.k8s.io/yaml v1.3.0/go.mod h1:GeOyir5tyXNByN85N/dRIT9es5UQNerPYEKK56eTBm8=
+src.techknowlogick.com/xormigrate v1.7.1 h1:RKGLLUAqJ+zO8iZ7eOc7oLH7f0cs2gfXSZSvBRBHnlY=
+src.techknowlogick.com/xormigrate v1.7.1/go.mod h1:YGNBdj8prENlySwIKmfoEXp7ILGjAltyKFXD0qLgD7U=
 xorm.io/builder v0.3.11-0.20220531020008-1bd24a7dc978/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
 xorm.io/builder v0.3.13 h1:a3jmiVVL19psGeXx8GIurTp7p0IIgqeDmwhcR6BAOAo=
 xorm.io/builder v0.3.13/go.mod h1:aUW0S9eb9VCaPohFCH3j7czOx1PMW3i1HrSzbLYGBSE=
 xorm.io/xorm v1.3.3/go.mod h1:qFJGFoVYbbIdnz2vaL5OxSQ2raleMpyRRalnq3n9OJo=
 xorm.io/xorm v1.3.4 h1:vWFKzR3DhGUDl5b4srhUjhDwjxkZAc4C7BFszpu0swI=
 xorm.io/xorm v1.3.4/go.mod h1:qFJGFoVYbbIdnz2vaL5OxSQ2raleMpyRRalnq3n9OJo=
@@ -70,10 +70,7 @@ var Config = struct {
 		RootPath      string
 		CustomCSSFile string
 		CustomJsFile  string
-		Migrations    struct {
-			AllowLong bool
-		}
 		EnableSwagger bool
 		// Open bool
 		// Orgs map[string]struct{}
 		// Admins map[string]struct{}
@@ -16,6 +16,7 @@ package datastore

 import (
 	"github.com/rs/zerolog"

 	"go.woodpecker-ci.org/woodpecker/server/store"
 	"go.woodpecker-ci.org/woodpecker/server/store/datastore/migration"
@@ -54,8 +55,8 @@ func (s storage) Ping() error {
 }

 // Migrate old storage or init new one
-func (s storage) Migrate() error {
-	return migration.Migrate(s.engine)
+func (s storage) Migrate(allowLong bool) error {
+	return migration.Migrate(s.engine, allowLong)
 }

 func (s storage) Close() error {
server/store/datastore/migration/000_legacy_to_xormigrate.go (new file, 33 lines)
@@ -0,0 +1,33 @@
+package migration
+
+import (
+	"src.techknowlogick.com/xormigrate"
+	"xorm.io/xorm"
+)
+
+type v000Migrations struct {
+	Name string `xorm:"UNIQUE"`
+}
+
+func (m *v000Migrations) TableName() string {
+	return "migrations"
+}
+
+var legacyToXormigrate = xormigrate.Migration{
+	ID: "legacy-to-xormigrate",
+	MigrateSession: func(sess *xorm.Session) error {
+		var mig []*v000Migrations
+		if err := sess.Find(&mig); err != nil {
+			return err
+		}
+		for _, m := range mig {
+			if _, err := sess.Insert(&xormigrate.Migration{
+				ID: m.Name,
+			}); err != nil {
+				return err
+			}
+		}
+
+		return sess.DropTable("migrations")
+	},
+}
@@ -18,14 +18,14 @@ import (
 	"fmt"

 	"github.com/rs/zerolog/log"
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 	"xorm.io/xorm/schemas"
 )

-var legacy2Xorm = task{
-	name:     "xorm",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var legacy2Xorm = xormigrate.Migration{
+	ID: "xorm",
+	MigrateSession: func(sess *xorm.Session) error {
 		// make sure we have required migrations - else fail and point to last major version
 		for _, mig := range []string{
 			// users
@@ -74,7 +74,7 @@ var legacy2Xorm = task{
 			"create-table-build-config",
 			"populate-build-config",
 		} {
-			exist, err := sess.Exist(&migrations{mig})
+			exist, err := sess.Exist(&xormigrate.Migration{ID: mig})
 			if err != nil {
 				return fmt.Errorf("test migration existence: %w", err)
 			}
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var alterTableReposDropFallback = task{
-	name: "alter-table-drop-repo-fallback",
-	fn: func(sess *xorm.Session) error {
+var alterTableReposDropFallback = xormigrate.Migration{
+	ID: "alter-table-drop-repo-fallback",
+	MigrateSession: func(sess *xorm.Session) error {
 		return dropTableColumns(sess, "repos", "repo_fallback")
 	},
 }
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var alterTableReposDropAllowDeploysAllowTags = task{
-	name: "drop-allow-push-tags-deploys-columns",
-	fn: func(sess *xorm.Session) error {
+var alterTableReposDropAllowDeploysAllowTags = xormigrate.Migration{
+	ID: "drop-allow-push-tags-deploys-columns",
+	MigrateSession: func(sess *xorm.Session) error {
 		return dropTableColumns(sess, "repos",
 			"repo_allow_deploys",
 			"repo_allow_tags",
@@ -15,14 +15,15 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

-var fixPRSecretEventName = task{
-	name: "fix-pr-secret-event-name",
-	fn: func(sess *xorm.Session) error {
+var fixPRSecretEventName = xormigrate.Migration{
+	ID: "fix-pr-secret-event-name",
+	MigrateSession: func(sess *xorm.Session) error {
 		const batchSize = 100
 		for start := 0; ; start += batchSize {
 			secrets := make([]*model.Secret, 0, batchSize)
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var alterTableReposDropCounter = task{
-	name: "alter-table-drop-counter",
-	fn: func(sess *xorm.Session) error {
+var alterTableReposDropCounter = xormigrate.Migration{
+	ID: "alter-table-drop-counter",
+	MigrateSession: func(sess *xorm.Session) error {
 		return dropTableColumns(sess, "repos", "repo_counter")
 	},
 }
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var dropSenders = task{
-	name: "drop-senders",
-	fn: func(sess *xorm.Session) error {
+var dropSenders = xormigrate.Migration{
+	ID: "drop-senders",
+	MigrateSession: func(sess *xorm.Session) error {
 		return sess.DropTable("senders")
 	},
 }
@@ -15,13 +15,14 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 	"xorm.io/xorm/schemas"
 )

-var alterTableLogUpdateColumnLogDataType = task{
-	name: "alter-table-logs-update-type-of-data",
-	fn: func(sess *xorm.Session) (err error) {
+var alterTableLogUpdateColumnLogDataType = xormigrate.Migration{
+	ID: "alter-table-logs-update-type-of-data",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		dialect := sess.Engine().Dialect().URI().DBType

 		switch dialect {
@@ -15,24 +15,25 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-type SecretV007 struct {
+type SecretV008 struct {
 	Owner  string `json:"-" xorm:"NOT NULL DEFAULT '' UNIQUE(s) INDEX 'secret_owner'"`
 	RepoID int64  `json:"-" xorm:"NOT NULL DEFAULT 0 UNIQUE(s) INDEX 'secret_repo_id'"`
 	Name   string `json:"name" xorm:"NOT NULL UNIQUE(s) INDEX 'secret_name'"`
 }

 // TableName return database table name for xorm
-func (SecretV007) TableName() string {
+func (SecretV008) TableName() string {
 	return "secrets"
 }

-var alterTableSecretsAddUserCol = task{
-	name: "alter-table-add-secrets-user-id",
-	fn: func(sess *xorm.Session) error {
-		if err := sess.Sync(new(SecretV007)); err != nil {
+var alterTableSecretsAddUserCol = xormigrate.Migration{
+	ID: "alter-table-add-secrets-user-id",
+	MigrateSession: func(sess *xorm.Session) error {
+		if err := sess.Sync(new(SecretV008)); err != nil {
 			return err
 		}
 		if err := alterColumnDefault(sess, "secrets", "secret_repo_id", "0"); err != nil {
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var lowercaseSecretNames = task{
-	name: "lowercase-secret-names",
-	fn: func(sess *xorm.Session) (err error) {
+var lowercaseSecretNames = xormigrate.Migration{
+	ID: "lowercase-secret-names",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		_, err = sess.Exec("UPDATE secrets SET secret_name = LOWER(secret_name);")
 		return err
 	},
@@ -15,14 +15,15 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

-var recreateAgentsTable = task{
-	name: "recreate-agents-table",
-	fn: func(sess *xorm.Session) error {
+var recreateAgentsTable = xormigrate.Migration{
+	ID: "recreate-agents-table",
+	MigrateSession: func(sess *xorm.Session) error {
 		if err := sess.DropTable("agents"); err != nil {
 			return err
 		}
@@ -15,13 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var renameBuildsToPipeline = task{
-	name:     "rename-builds-to-pipeline",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var renameBuildsToPipeline = xormigrate.Migration{
+	ID: "rename-builds-to-pipeline",
+	MigrateSession: func(sess *xorm.Session) error {
 		err := renameTable(sess, "builds", "pipelines")
 		if err != nil {
 			return err
@@ -17,6 +17,7 @@ package migration
 import (
 	"strings"

+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

@@ -25,10 +26,9 @@ type oldTable struct {
 	columns []string
 }

-var renameColumnsBuildsToPipeline = task{
-	name:     "rename-columns-builds-to-pipeline",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var renameColumnsBuildsToPipeline = xormigrate.Migration{
+	ID: "rename-columns-builds-to-pipeline",
+	MigrateSession: func(sess *xorm.Session) error {
 		var oldColumns []*oldTable

 		oldColumns = append(oldColumns, &oldTable{
@@ -17,13 +17,13 @@ package migration
 import (
 	"strings"

+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var renameTableProcsToSteps = task{
-	name:     "rename-procs-to-steps",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var renameTableProcsToSteps = xormigrate.Migration{
+	ID: "rename-procs-to-steps",
+	MigrateSession: func(sess *xorm.Session) error {
 		err := renameTable(sess, "procs", "steps")
 		if err != nil {
 			return err
@@ -15,28 +15,28 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-type oldRepo012 struct {
+type oldRepo013 struct {
 	ID       int64  `xorm:"pk autoincr 'repo_id'"`
 	RemoteID string `xorm:"remote_id"`
 }

-func (oldRepo012) TableName() string {
+func (oldRepo013) TableName() string {
 	return "repos"
 }

-var renameRemoteToForge = task{
-	name:     "rename-remote-to-forge",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var renameRemoteToForge = xormigrate.Migration{
+	ID: "rename-remote-to-forge",
+	MigrateSession: func(sess *xorm.Session) error {
 		if err := renameColumn(sess, "pipelines", "pipeline_remote", "pipeline_clone_url"); err != nil {
 			return err
 		}

 		// make sure the column exist before rename it
-		if err := sess.Sync(new(oldRepo012)); err != nil {
+		if err := sess.Sync(new(oldRepo013)); err != nil {
 			return err
 		}

@@ -15,13 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var renameForgeIDToForgeRemoteID = task{
-	name:     "rename-forge-id-to-forge-remote-id",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var renameForgeIDToForgeRemoteID = xormigrate.Migration{
+	ID: "rename-forge-id-to-forge-remote-id",
+	MigrateSession: func(sess *xorm.Session) error {
 		return renameColumn(sess, "repos", "forge_id", "forge_remote_id")
 	},
 }
@@ -15,13 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var removeActiveFromUsers = task{
-	name:     "remove-active-from-users",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var removeActiveFromUsers = xormigrate.Migration{
+	ID: "remove-active-from-users",
+	MigrateSession: func(sess *xorm.Session) error {
 		return dropTableColumns(sess, "users", "user_active")
 	},
 }
@@ -15,13 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var removeInactiveRepos = task{
-	name:     "remove-inactive-repos",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var removeInactiveRepos = xormigrate.Migration{
+	ID: "remove-inactive-repos",
+	MigrateSession: func(sess *xorm.Session) error {
 		// If the timeout is 0, the repo was never activated, so we remove it.
 		_, err := sess.Table("repos").Where("repo_active = ?", false).And("repo_timeout = ?", 0).Delete()
 		if err != nil {
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var dropFiles = task{
-	name: "drop-files",
-	fn: func(sess *xorm.Session) error {
+var dropFiles = xormigrate.Migration{
+	ID: "drop-files",
+	MigrateSession: func(sess *xorm.Session) error {
 		return sess.DropTable("files")
 	},
 }
@@ -15,23 +15,24 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-type oldStep017 struct {
+type oldStep018 struct {
 	ID      int64  `xorm:"pk autoincr 'step_id'"`
 	Machine string `xorm:"step_machine"`
 }

-func (oldStep017) TableName() string {
+func (oldStep018) TableName() string {
 	return "steps"
 }

-var removeMachineCol = task{
-	name: "remove-machine-col",
-	fn: func(sess *xorm.Session) error {
+var removeMachineCol = xormigrate.Migration{
+	ID: "remove-machine-col",
+	MigrateSession: func(sess *xorm.Session) error {
 		// make sure step_machine column exists
-		if err := sess.Sync(new(oldStep017)); err != nil {
+		if err := sess.Sync(new(oldStep018)); err != nil {
 			return err
 		}
 		return dropTableColumns(sess, "steps", "step_machine")
@@ -15,24 +15,25 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-type oldPipeline018 struct {
+type oldPipeline019 struct {
 	ID       int64 `xorm:"pk autoincr 'pipeline_id'"`
 	Signed   bool  `xorm:"pipeline_signed"`
 	Verified bool  `xorm:"pipeline_verified"`
 }

-func (oldPipeline018) TableName() string {
+func (oldPipeline019) TableName() string {
 	return "pipelines"
 }

-var dropOldCols = task{
-	name: "drop-old-col",
-	fn: func(sess *xorm.Session) error {
+var dropOldCols = xormigrate.Migration{
+	ID: "drop-old-col",
+	MigrateSession: func(sess *xorm.Session) error {
 		// make sure columns on pipelines exist
-		if err := sess.Sync(new(oldPipeline018)); err != nil {
+		if err := sess.Sync(new(oldPipeline019)); err != nil {
 			return err
 		}
 		if err := dropTableColumns(sess, "steps", "step_pgid"); err != nil {
@@ -22,29 +22,26 @@ import (
 	"github.com/rs/zerolog/log"
 	"github.com/tevino/abool/v2"
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

-	"go.woodpecker-ci.org/woodpecker/server"
 	"go.woodpecker-ci.org/woodpecker/shared/utils"
 )

-// maxDefaultSqliteItems set the threshold at witch point the migration will fail by default
-var maxDefaultSqliteItems019 = 5000
+// perPage020 sets the size of the slice to read per page
+var perPage020 = 100

-// perPage019 set the size of the slice to read per page
-var perPage019 = 100
-
-type oldLogs019 struct {
+type oldLogs020 struct {
 	ID     int64  `xorm:"pk autoincr 'log_id'"`
 	StepID int64  `xorm:"UNIQUE 'log_step_id'"`
 	Data   []byte `xorm:"LONGBLOB 'log_data'"`
 }

-func (oldLogs019) TableName() string {
+func (oldLogs020) TableName() string {
 	return "logs"
 }

-type oldLogEntry019 struct {
+type oldLogEntry020 struct {
 	Step string `json:"step,omitempty"`
 	Time int64  `json:"time,omitempty"`
 	Type int    `json:"type,omitempty"`
@@ -52,7 +49,7 @@ type oldLogEntry019 struct {
 	Out string `json:"out,omitempty"`
 }

-type newLogEntry019 struct {
+type newLogEntry020 struct {
 	ID     int64 `xorm:"pk autoincr 'id'"`
 	StepID int64 `xorm:"'step_id'"`
 	Time   int64
@@ -62,38 +59,27 @@ type newLogEntry019 struct {
 	Type int
 }

-func (newLogEntry019) TableName() string {
+func (newLogEntry020) TableName() string {
 	return "log_entries"
 }

-var initLogsEntriesTable = task{
-	name:     "init-log_entries",
-	required: true,
-	fn: func(sess *xorm.Session) error {
-		return sess.Sync(new(newLogEntry019))
+var initLogsEntriesTable = xormigrate.Migration{
+	ID: "init-log_entries",
+	MigrateSession: func(sess *xorm.Session) error {
+		return sess.Sync(new(newLogEntry020))
 	},
 }

-var migrateLogs2LogEntries = task{
-	name:     "migrate-logs-to-log_entries",
-	required: false,
-	engineFn: func(e *xorm.Engine) error {
+var migrateLogs2LogEntries = xormigrate.Migration{
+	ID:   "migrate-logs-to-log_entries",
+	Long: true,
+	Migrate: func(e *xorm.Engine) error {
 		// make sure old logs table exists
-		if exist, err := e.IsTableExist(new(oldLogs019)); !exist || err != nil {
+		if exist, err := e.IsTableExist(new(oldLogs020)); !exist || err != nil {
 			return err
 		}

-		// first we check if we have just 1000 entries to migrate
-		toMigrate, err := e.Count(new(oldLogs019))
-		if err != nil {
-			return err
-		}
-
-		if toMigrate > int64(maxDefaultSqliteItems019) && !server.Config.Server.Migrations.AllowLong {
-			return fmt.Errorf("Migrating logs to log_entries is skipped, as we have %d entries to convert. Set 'WOODPECKER_MIGRATIONS_ALLOW_LONG' to 'true' to migrate anyway", toMigrate)
-		}
-
-		if err := e.Sync(new(oldLogs019)); err != nil {
+		if err := e.Sync(new(oldLogs020)); err != nil {
 			return err
 		}

@@ -101,8 +87,8 @@ var migrateLogs2LogEntries = task{

 		page := 0
 		offset := 0
-		logs := make([]*oldLogs019, 0, perPage019)
-		logEntries := make([]*oldLogEntry019, 0, 50)
+		logs := make([]*oldLogs020, 0, perPage020)
+		logEntries := make([]*oldLogEntry020, 0, 50)

 		sigterm := abool.New()
 		ctx, cancelCtx := context.WithCancelCause(context.Background())
@@ -124,7 +110,7 @@ var migrateLogs2LogEntries = task{
 			}
 			logs = logs[:0]

-			err := sess.Limit(perPage019, offset).Find(&logs)
+			err := sess.Limit(perPage020, offset).Find(&logs)
 			if err != nil {
 				return err
 			}
@@ -146,7 +132,7 @@ var migrateLogs2LogEntries = task{
 					time = logEntry.Time
 				}

-				log := &newLogEntry019{
+				log := &newLogEntry020{
 					StepID: l.StepID,
 					Data:   []byte(logEntry.Out),
 					Line:   logEntry.Pos,
@@ -168,7 +154,7 @@ var migrateLogs2LogEntries = task{
 				return err
 			}

-			if len(logs) < perPage019 {
+			if len(logs) < perPage020 {
 				break
 			}
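The hunks above turn the heavy logs-to-log_entries conversion into an xormigrate migration flagged as long-running. A minimal sketch of that shape, assuming only the xormigrate fields already used in this diff (ID, Long, Migrate); the ID and body here are placeholders for illustration, not the real conversion:

	// sketch: a long-running migration gets Long: true and a whole-engine
	// Migrate func instead of a per-session MigrateSession func; it only
	// runs when AllowLong(true) is set on the migrator.
	var exampleLongMigration = xormigrate.Migration{
		ID:   "example-long-migration", // hypothetical ID, illustration only
		Long: true,
		Migrate: func(e *xorm.Engine) error {
			// batched work that may take a long time would go here
			return nil
		},
	}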
@@ -15,12 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

-type oldStep020 struct {
+type oldStep021 struct {
 	ID         int64 `xorm:"pk autoincr 'step_id'"`
 	PipelineID int64 `xorm:"UNIQUE(s) INDEX 'step_pipeline_id'"`
 	PID        int   `xorm:"UNIQUE(s) 'step_pid'"`
@@ -35,23 +36,22 @@ type oldStep020 struct {
 	Environ map[string]string `xorm:"json 'step_environ'"`
 }

-func (oldStep020) TableName() string {
+func (oldStep021) TableName() string {
 	return "steps"
 }

-var parentStepsToWorkflows = task{
-	name:     "parent-steps-to-workflows",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var parentStepsToWorkflows = xormigrate.Migration{
+	ID: "parent-steps-to-workflows",
+	MigrateSession: func(sess *xorm.Session) error {
 		if err := sess.Sync(new(model.Workflow)); err != nil {
 			return err
 		}
 		// make sure the columns exist before removing them
-		if err := sess.Sync(new(oldStep020)); err != nil {
+		if err := sess.Sync(new(oldStep021)); err != nil {
 			return err
 		}

-		var parentSteps []*oldStep020
+		var parentSteps []*oldStep021
 		err := sess.Where("step_ppid = ?", 0).Find(&parentSteps)
 		if err != nil {
 			return err
@@ -76,7 +76,7 @@ var parentStepsToWorkflows = task{
 			return err
 		}

-		_, err = sess.Delete(&oldStep020{ID: p.ID})
+		_, err = sess.Delete(&oldStep021{ID: p.ID})
 		if err != nil {
 			return err
 		}
@@ -18,13 +18,14 @@ import (
 	"fmt"
 	"strings"

+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/builder"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

-type oldSecret021 struct {
+type oldSecret022 struct {
 	ID    int64  `xorm:"pk autoincr 'secret_id'"`
 	Owner string `xorm:"'secret_owner'"`
 	OrgID int64  `xorm:"NOT NULL DEFAULT 0 'secret_org_id'"`
@@ -32,51 +33,50 @@ type oldSecret021 struct {
 	Name string `xorm:"NOT NULL INDEX 'secret_name'"`
 }

-func (oldSecret021) TableName() string {
+func (oldSecret022) TableName() string {
 	return "secrets"
 }

-type syncRepo021 struct {
+type syncRepo022 struct {
 	OrgID int64 `json:"org_id" xorm:"repo_org_id"`
 }

 // TableName return database table name for xorm
-func (syncRepo021) TableName() string {
+func (syncRepo022) TableName() string {
 	return "repos"
 }

-type repo021 struct {
+type repo022 struct {
 	ID    int64  `json:"id,omitempty" xorm:"pk autoincr 'repo_id'"`
 	OrgID int64  `json:"org_id" xorm:"repo_org_id"`
 	Owner string `json:"owner" xorm:"UNIQUE(name) 'repo_owner'"`
 }

 // TableName return database table name for xorm
-func (repo021) TableName() string {
+func (repo022) TableName() string {
 	return "repos"
 }

-var addOrgs = task{
-	name:     "add-orgs",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var addOrgs = xormigrate.Migration{
+	ID: "add-orgs",
+	MigrateSession: func(sess *xorm.Session) error {
 		if exist, err := sess.IsTableExist("orgs"); exist && err == nil {
 			if err := sess.DropTable("orgs"); err != nil {
 				return fmt.Errorf("drop old orgs table failed: %w", err)
 			}
 		}

-		if err := sess.Sync(new(model.Org), new(syncRepo021), new(model.User)); err != nil {
+		if err := sess.Sync(new(model.Org), new(syncRepo022), new(model.User)); err != nil {
 			return fmt.Errorf("sync new models failed: %w", err)
 		}

 		// make sure the columns exist before removing them
-		if _, err := sess.SyncWithOptions(xorm.SyncOptions{IgnoreConstrains: true, IgnoreIndices: true}, new(oldSecret021)); err != nil {
+		if _, err := sess.SyncWithOptions(xorm.SyncOptions{IgnoreConstrains: true, IgnoreIndices: true}, new(oldSecret022)); err != nil {
 			return fmt.Errorf("sync old secrets models failed: %w", err)
 		}

 		// get all org names from repos
-		var repos []*repo021
+		var repos []*repo022
 		if err := sess.Find(&repos); err != nil {
 			return fmt.Errorf("find all repos failed: %w", err)
 		}
@@ -107,7 +107,7 @@ var addOrgs = task{
 			orgs[orgName] = org

 			// update org secrets
-			var secrets []*oldSecret021
+			var secrets []*oldSecret022
 			if err := sess.Where(builder.Eq{"secret_owner": orgName, "secret_repo_id": 0}).Find(&secrets); err != nil {
 				return fmt.Errorf("get org secrets failed: %w", err)
 			}
@@ -17,15 +17,15 @@ package migration
 import (
 	"fmt"

+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

-var addOrgID = task{
-	name:     "add-org-id",
-	required: true,
-	fn: func(sess *xorm.Session) error {
+var addOrgID = xormigrate.Migration{
+	ID: "add-org-id",
+	MigrateSession: func(sess *xorm.Session) error {
 		if err := sess.Sync(new(model.User)); err != nil {
 			return fmt.Errorf("sync new models failed: %w", err)
 		}
@@ -15,13 +15,14 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 	"xorm.io/xorm/schemas"
 )

-var alterTableTasksUpdateColumnTaskDataType = task{
-	name: "alter-table-tasks-update-type-of-task-data",
-	fn: func(sess *xorm.Session) (err error) {
+var alterTableTasksUpdateColumnTaskDataType = xormigrate.Migration{
+	ID: "alter-table-tasks-update-type-of-task-data",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		dialect := sess.Engine().Dialect().URI().DBType

 		switch dialect {
@@ -15,13 +15,14 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 	"xorm.io/xorm/schemas"
 )

-var alterTableConfigUpdateColumnConfigDataType = task{
-	name: "alter-table-config-update-type-of-config-data",
-	fn: func(sess *xorm.Session) (err error) {
+var alterTableConfigUpdateColumnConfigDataType = xormigrate.Migration{
+	ID: "alter-table-config-update-type-of-config-data",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		dialect := sess.Engine().Dialect().URI().DBType

 		switch dialect {
@@ -15,10 +15,11 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-type oldSecret025 struct {
+type oldSecret026 struct {
 	ID          int64 `json:"id" xorm:"pk autoincr 'secret_id'"`
 	PluginsOnly bool  `json:"plugins_only" xorm:"secret_plugins_only"`
 	SkipVerify  bool  `json:"-" xorm:"secret_skip_verify"`
@@ -26,15 +27,15 @@ type oldSecret025 struct {
 	Images []string `json:"images" xorm:"json 'secret_images'"`
 }

-func (oldSecret025) TableName() string {
+func (oldSecret026) TableName() string {
 	return "secrets"
 }

-var removePluginOnlyOptionFromSecretsTable = task{
-	name: "remove-plugin-only-option-from-secrets-table",
-	fn: func(sess *xorm.Session) (err error) {
+var removePluginOnlyOptionFromSecretsTable = xormigrate.Migration{
+	ID: "remove-plugin-only-option-from-secrets-table",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		// make sure plugin_only column exists
-		if err := sess.Sync(new(oldSecret025)); err != nil {
+		if err := sess.Sync(new(oldSecret026)); err != nil {
 			return err
 		}

@@ -15,53 +15,54 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/pipeline/errors"
 )

-// perPage026 set the size of the slice to read per page
-var perPage026 = 100
+// perPage027 set the size of the slice to read per page
+var perPage027 = 100

-type pipeline026 struct {
+type pipeline027 struct {
 	ID     int64                   `json:"id" xorm:"pk autoincr 'pipeline_id'"`
 	Error  string                  `json:"error" xorm:"LONGTEXT 'pipeline_error'"` // old error format
 	Errors []*errors.PipelineError `json:"errors" xorm:"json 'pipeline_errors'"`   // new error format
 }

-func (pipeline026) TableName() string {
+func (pipeline027) TableName() string {
 	return "pipelines"
 }

-type PipelineError026 struct {
+type PipelineError027 struct {
 	Type      string `json:"type"`
 	Message   string `json:"message"`
 	IsWarning bool   `json:"is_warning"`
 	Data      any    `json:"data"`
 }

-var convertToNewPipelineErrorFormat = task{
-	name:     "convert-to-new-pipeline-error-format",
-	required: true,
-	fn: func(sess *xorm.Session) (err error) {
+var convertToNewPipelineErrorFormat = xormigrate.Migration{
+	ID:   "convert-to-new-pipeline-error-format",
+	Long: true,
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		// make sure pipeline_error column exists
-		if err := sess.Sync(new(pipeline026)); err != nil {
+		if err := sess.Sync(new(pipeline027)); err != nil {
 			return err
 		}

 		page := 0
-		oldPipelines := make([]*pipeline026, 0, perPage026)
+		oldPipelines := make([]*pipeline027, 0, perPage027)

 		for {
 			oldPipelines = oldPipelines[:0]

-			err := sess.Limit(perPage026, page*perPage026).Cols("pipeline_id", "pipeline_error").Where("pipeline_error != ''").Find(&oldPipelines)
+			err := sess.Limit(perPage027, page*perPage027).Cols("pipeline_id", "pipeline_error").Where("pipeline_error != ''").Find(&oldPipelines)
 			if err != nil {
 				return err
 			}

 			for _, oldPipeline := range oldPipelines {
-				var newPipeline pipeline026
+				var newPipeline pipeline027
 				newPipeline.ID = oldPipeline.ID
 				newPipeline.Errors = []*errors.PipelineError{{
 					Type: "generic",
@@ -73,7 +74,7 @@ var convertToNewPipelineErrorFormat = task{
 				}
 			}

-			if len(oldPipelines) < perPage026 {
+			if len(oldPipelines) < perPage027 {
 				break
 			}
@@ -15,13 +15,13 @@
 package migration

 import (
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"
 )

-var renameLinkToURL = task{
-	name:     "rename-link-to-url",
-	required: true,
-	fn: func(sess *xorm.Session) (err error) {
+var renameLinkToURL = xormigrate.Migration{
+	ID: "rename-link-to-url",
+	MigrateSession: func(sess *xorm.Session) (err error) {
 		if err := renameColumn(sess, "pipelines", "pipeline_link", "pipeline_forge_url"); err != nil {
 			return err
 		}
server/store/datastore/migration/logger.go (new file, 55 lines)
@@ -0,0 +1,55 @@
+// Copyright 2023 Woodpecker Authors
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package migration
+
+import (
+	"fmt"
+
+	"github.com/rs/zerolog/log"
+)
+
+type xormigrateLogger struct{}
+
+func (l *xormigrateLogger) Debug(v ...interface{}) {
+	log.Debug().Msg(fmt.Sprint(v...))
+}
+
+func (l *xormigrateLogger) Debugf(format string, v ...interface{}) {
+	log.Debug().Msgf(format, v...)
+}
+
+func (l *xormigrateLogger) Info(v ...interface{}) {
+	log.Info().Msg(fmt.Sprint(v...))
+}
+
+func (l *xormigrateLogger) Infof(format string, v ...interface{}) {
+	log.Info().Msgf(format, v...)
+}
+
+func (l *xormigrateLogger) Warn(v ...interface{}) {
+	log.Warn().Msg(fmt.Sprint(v...))
+}
+
+func (l *xormigrateLogger) Warnf(format string, v ...interface{}) {
+	log.Warn().Msgf(format, v...)
+}
+
+func (l *xormigrateLogger) Error(v ...interface{}) {
+	log.Error().Msg(fmt.Sprint(v...))
+}
+
+func (l *xormigrateLogger) Errorf(format string, v ...interface{}) {
+	log.Error().Msgf(format, v...)
+}
@@ -15,21 +15,19 @@
 package migration

 import (
-	"context"
-	"errors"
 	"fmt"
 	"reflect"
-	"time"

-	"github.com/rs/zerolog/log"
+	"src.techknowlogick.com/xormigrate"
 	"xorm.io/xorm"

 	"go.woodpecker-ci.org/woodpecker/server/model"
 )

 // APPEND NEW MIGRATIONS
-// they are executed in order and if one fails woodpecker will try to rollback that specific one and quits
-var migrationTasks = []*task{
+// they are executed in order and if one fails Xormigrate will try to rollback that specific one and quits
+var migrationTasks = []*xormigrate.Migration{
+	&legacyToXormigrate,
 	&legacy2Xorm,
 	&alterTableReposDropFallback,
 	&alterTableReposDropAllowDeploysAllowTags,
@@ -82,70 +80,25 @@ var allBeans = []any{
 	new(model.Org),
 }

-type migrations struct {
-	Name string `xorm:"UNIQUE"`
-}
-
-type task struct {
-	name     string
-	required bool
-	fn       func(sess *xorm.Session) error
-	// engineFn does manage session on it's own. only use it if you really need to
-	engineFn func(e *xorm.Engine) error
-}
-
-// initNew create tables for new instance
-func initNew(sess *xorm.Session) error {
-	if err := syncAll(sess); err != nil {
-		return err
-	}
-
-	// dummy run migrations
-	for _, task := range migrationTasks {
-		if _, err := sess.Insert(&migrations{task.name}); err != nil {
-			return err
-		}
-	}
-
-	return nil
-}
-
-func Migrate(e *xorm.Engine) error {
+func Migrate(e *xorm.Engine, allowLong bool) error {
 	e.SetDisableGlobalCache(true)

-	if err := e.Sync(new(migrations)); err != nil {
-		return fmt.Errorf("error to create migrations table: %w", err)
+	m := xormigrate.New(e, migrationTasks)
+	m.AllowLong(allowLong)
+	oldCount, err := e.Table("migrations").Count()
+	if oldCount < 1 || err != nil {
+		// allow new schema initialization if old migrations table is empty or it does not exist (err != nil)
+		// schema initialization will always run if we call `InitSchema`
+		m.InitSchema(func(engine *xorm.Engine) error {
+			// do nothing on schema init, models are synced in any case below
+			return nil
+		})
 	}

-	sess := e.NewSession()
-	defer sess.Close()
-	if err := sess.Begin(); err != nil {
-		return fmt.Errorf("could not create initial migration session: %w", err)
-	}
+	m.SetLogger(&xormigrateLogger{})

-	// check if we have a fresh installation or need to check for migrations
-	c, err := sess.Count(new(migrations))
-	if err != nil {
-		return fmt.Errorf("could not count migrations: %w", err)
-	}
-
-	if c == 0 {
-		if err := initNew(sess); err != nil {
-			return fmt.Errorf("could not init a new database: %w", err)
-		}
-		if err := sess.Commit(); err != nil {
-			return fmt.Errorf("could not commit initial migration session: %w", err)
-		}
-
-		return nil
-	}
-
-	if err := sess.Commit(); err != nil {
-		return fmt.Errorf("could not commit initial migration session: %w", err)
-	}
-
-	if err := runTasks(e, migrationTasks); err != nil {
-		return fmt.Errorf("run tasks failed: %w", err)
+	if err := m.Migrate(); err != nil {
+		return err
 	}

 	e.SetDisableGlobalCache(false)
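Condensed, the new entry point boils down to a handful of xormigrate calls. A sketch assembled only from the calls that appear in the hunk above (the wrapper function name is hypothetical, and the conditional InitSchema/legacy-table check is elided):

	// sketch of the xormigrate-driven flow shown above; runMigrations is
	// an illustrative name, not part of this commit.
	func runMigrations(engine *xorm.Engine, allowLong bool) error {
		m := xormigrate.New(engine, migrationTasks)
		m.AllowLong(allowLong)            // gate migrations marked Long: true
		m.SetLogger(&xormigrateLogger{})  // route xormigrate output through zerolog
		// the real Migrate above also calls m.InitSchema(...) when the legacy
		// "migrations" table is empty or missing
		return m.Migrate()
	}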
@@ -157,74 +110,7 @@ func Migrate(e *xorm.Engine) error {
 	return nil
 }

-func runTasks(e *xorm.Engine, tasks []*task) error {
-	// cache migrations in db
-	migCache := make(map[string]bool)
-	var migList []*migrations
-	if err := e.Find(&migList); err != nil {
-		return err
-	}
-	for i := range migList {
-		migCache[migList[i].Name] = true
-	}
-
-	for _, task := range tasks {
-		if migCache[task.name] {
-			log.Trace().Msgf("migration task '%s' already applied", task.name)
-			continue
-		}
-
-		log.Trace().Msgf("start migration task '%s'", task.name)
-		aliveMsgCancel := showBeAliveSign(task.name)
-		defer aliveMsgCancel(nil)
-		var taskErr error
-		if task.fn != nil {
-			sess := e.NewSession().NoCache()
-			defer sess.Close()
-			if err := sess.Begin(); err != nil {
-				return fmt.Errorf("could not begin session for '%s': %w", task.name, err)
-			}
-
-			if taskErr = task.fn(sess); taskErr != nil {
-				aliveMsgCancel(nil)
-				if err := sess.Rollback(); err != nil {
-					taskErr = errors.Join(taskErr, err)
-				}
-			} else if err := sess.Commit(); err != nil {
-				return fmt.Errorf("could not commit session for '%s': %w", task.name, err)
-			}
-		} else if task.engineFn != nil {
-			taskErr = task.engineFn(e)
-		} else {
-			log.Trace().Msgf("skip migration task '%s'", task.name)
-			aliveMsgCancel(nil)
-			continue
-		}
-
-		aliveMsgCancel(nil)
-		if taskErr != nil {
-			if task.required {
-				return fmt.Errorf("migration task '%s' failed: %w", task.name, taskErr)
-			}
-			log.Error().Err(taskErr).Msgf("migration task '%s' failed but is not required", task.name)
-			continue
-		}
-		log.Debug().Msgf("migration task '%s' done", task.name)
-
-		if _, err := e.Insert(&migrations{task.name}); err != nil {
-			return fmt.Errorf("migration task '%s' could not be marked as finished: %w", task.name, err)
-		}
-
-		migCache[task.name] = true
-	}
-	return nil
-}
-
-type syncEngine interface {
-	Sync(beans ...any) error
-}
-
-func syncAll(sess syncEngine) error {
+func syncAll(sess *xorm.Engine) error {
 	for _, bean := range allBeans {
 		if err := sess.Sync(bean); err != nil {
 			return fmt.Errorf("Sync error '%s': %w", reflect.TypeOf(bean), err)
@@ -232,20 +118,3 @@ func syncAll(sess syncEngine) error {
 	}
 	return nil
 }
-
-var showBeAliveSignDelay = time.Second * 20
-
-func showBeAliveSign(taskName string) context.CancelCauseFunc {
-	ctx, cancel := context.WithCancelCause(context.Background())
-	go func() {
-		for {
-			select {
-			case <-ctx.Done():
-				return
-			case <-time.After(showBeAliveSignDelay):
-				log.Info().Msgf("Migration '%s' is still running, please be patient", taskName)
-			}
-		}
-	}()
-	return cancel
-}
@@ -94,14 +94,9 @@ func testDB(t *testing.T, new bool) (engine *xorm.Engine, closeDB func()) {
 }

 func TestMigrate(t *testing.T) {
-	// make all tasks required for tests
-	for _, task := range migrationTasks {
-		task.required = true
-	}
-
 	// init new db
 	engine, closeDB := testDB(t, true)
-	assert.NoError(t, Migrate(engine))
+	assert.NoError(t, Migrate(engine, true))
 	closeDB()

 	dbType := engine.Dialect().URI().DBType
@@ -112,6 +107,6 @@ func TestMigrate(t *testing.T) {

 	// migrate old db
 	engine, closeDB = testDB(t, false)
-	assert.NoError(t, Migrate(engine))
+	assert.NoError(t, Migrate(engine, true))
 	closeDB()
 }
@@ -1155,13 +1155,13 @@ func (_m *Store) LogSave(_a0 *model.Step, _a1 []*model.LogEntry) error {
 	return r0
 }

-// Migrate provides a mock function with given fields:
-func (_m *Store) Migrate() error {
-	ret := _m.Called()
+// Migrate provides a mock function with given fields: _a0
+func (_m *Store) Migrate(_a0 bool) error {
+	ret := _m.Called(_a0)

 	var r0 error
-	if rf, ok := ret.Get(0).(func() error); ok {
-		r0 = rf()
+	if rf, ok := ret.Get(0).(func(bool) error); ok {
+		r0 = rf(_a0)
 	} else {
 		r0 = ret.Error(0)
 	}
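Tests that stub this mock now have to expect the boolean argument as well. A hedged sketch of typical testify-style usage with the regenerated mock (only the Migrate(bool) signature comes from this commit; the expectation calls assume the mock embeds testify's mock.Mock, as the _m.Called(_a0) pattern above implies):

	// sketch: `t` is the *testing.T of the surrounding test
	store := new(mocks.Store)
	store.On("Migrate", true).Return(nil) // expect the allow-long flag to be passed
	if err := store.Migrate(true); err != nil {
		t.Fatal(err)
	}
	store.AssertExpectations(t)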
@@ -197,5 +197,5 @@ type Store interface {
 	// Store operations
 	Ping() error
 	Close() error
-	Migrate() error
+	Migrate(bool) error
 }