summaryrefslogtreecommitdiffstats
path: root/models/migrations/v1_20
diff options
context:
space:
mode:
authorDaniel Baumann <daniel@debian.org>2024-10-18 20:33:49 +0200
committerDaniel Baumann <daniel@debian.org>2024-10-18 20:33:49 +0200
commitdd136858f1ea40ad3c94191d647487fa4f31926c (patch)
tree58fec94a7b2a12510c9664b21793f1ed560c6518 /models/migrations/v1_20
parentInitial commit. (diff)
downloadforgejo-dd136858f1ea40ad3c94191d647487fa4f31926c.tar.xz
forgejo-dd136858f1ea40ad3c94191d647487fa4f31926c.zip
Adding upstream version 9.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat (limited to 'models/migrations/v1_20')
-rw-r--r--models/migrations/v1_20/main_test.go14
-rw-r--r--models/migrations/v1_20/v244.go22
-rw-r--r--models/migrations/v1_20/v245.go69
-rw-r--r--models/migrations/v1_20/v246.go16
-rw-r--r--models/migrations/v1_20/v247.go50
-rw-r--r--models/migrations/v1_20/v248.go14
-rw-r--r--models/migrations/v1_20/v249.go45
-rw-r--r--models/migrations/v1_20/v250.go135
-rw-r--r--models/migrations/v1_20/v251.go47
-rw-r--r--models/migrations/v1_20/v252.go47
-rw-r--r--models/migrations/v1_20/v253.go49
-rw-r--r--models/migrations/v1_20/v254.go18
-rw-r--r--models/migrations/v1_20/v255.go23
-rw-r--r--models/migrations/v1_20/v256.go23
-rw-r--r--models/migrations/v1_20/v257.go33
-rw-r--r--models/migrations/v1_20/v258.go16
-rw-r--r--models/migrations/v1_20/v259.go360
-rw-r--r--models/migrations/v1_20/v259_test.go111
18 files changed, 1092 insertions, 0 deletions
diff --git a/models/migrations/v1_20/main_test.go b/models/migrations/v1_20/main_test.go
new file mode 100644
index 0000000..e8d95b0
--- /dev/null
+++ b/models/migrations/v1_20/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "testing"
+
+ migration_tests "code.gitea.io/gitea/models/migrations/test"
+)
+
+// TestMain hands control to the shared migration test harness, which
+// prepares the test database environment before running this package's tests.
+func TestMain(m *testing.M) {
+	migration_tests.MainTest(m)
+}
diff --git a/models/migrations/v1_20/v244.go b/models/migrations/v1_20/v244.go
new file mode 100644
index 0000000..977566a
--- /dev/null
+++ b/models/migrations/v1_20/v244.go
@@ -0,0 +1,22 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "xorm.io/xorm"
+)
+
+// AddNeedApprovalToActionRun adds an index on TriggerUserID and the
+// NeedApproval/ApprovedBy columns to the action_run table. xorm's Sync only
+// adds what is missing, so this is safe to rerun.
+func AddNeedApprovalToActionRun(x *xorm.Engine) error {
+	/*
+		New index: TriggerUserID
+		New fields: NeedApproval, ApprovedBy
+	*/
+	type ActionRun struct {
+		TriggerUserID int64 `xorm:"index"`
+		NeedApproval  bool               // may need approval if it's a fork pull request
+		ApprovedBy    int64 `xorm:"index"` // who approved
+	}
+
+	return x.Sync(new(ActionRun))
+}
diff --git a/models/migrations/v1_20/v245.go b/models/migrations/v1_20/v245.go
new file mode 100644
index 0000000..b0d4c21
--- /dev/null
+++ b/models/migrations/v1_20/v245.go
@@ -0,0 +1,69 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/setting"
+
+ "xorm.io/xorm"
+)
+
+// RenameWebhookOrgToOwner renames the webhook.org_id column to owner_id.
+// The migration may be rerun, so it first inspects which of the two columns
+// already exist and returns early when the rename has already completed.
+func RenameWebhookOrgToOwner(x *xorm.Engine) error {
+	type Webhook struct {
+		OrgID int64 `xorm:"INDEX"`
+	}
+
+	// This migration maybe rerun so that we should check if it has been run
+	ownerExist, err := x.Dialect().IsColumnExist(x.DB(), context.Background(), "webhook", "owner_id")
+	if err != nil {
+		return err
+	}
+
+	if ownerExist {
+		orgExist, err := x.Dialect().IsColumnExist(x.DB(), context.Background(), "webhook", "org_id")
+		if err != nil {
+			return err
+		}
+		// owner_id exists and org_id is already gone: nothing left to do.
+		if !orgExist {
+			return nil
+		}
+	}
+
+	sess := x.NewSession()
+	defer sess.Close()
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	// Ensure org_id exists so the rename below cannot fail on a missing column.
+	if err := sess.Sync(new(Webhook)); err != nil {
+		return err
+	}
+
+	// Both columns exist: drop the stale owner_id so the rename can succeed.
+	if ownerExist {
+		if err := base.DropTableColumns(sess, "webhook", "owner_id"); err != nil {
+			return err
+		}
+	}
+
+	if setting.Database.Type.IsMySQL() {
+		inferredTable, err := x.TableInfo(new(Webhook))
+		if err != nil {
+			return err
+		}
+		// NOTE(review): MySQL branch uses CHANGE with an explicit column type
+		// rather than RENAME COLUMN — presumably for MySQL < 8.0 compatibility.
+		sqlType := x.Dialect().SQLType(inferredTable.GetColumn("org_id"))
+		if _, err := sess.Exec(fmt.Sprintf("ALTER TABLE `webhook` CHANGE org_id owner_id %s", sqlType)); err != nil {
+			return err
+		}
+	} else {
+		if _, err := sess.Exec("ALTER TABLE `webhook` RENAME COLUMN org_id TO owner_id"); err != nil {
+			return err
+		}
+	}
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v246.go b/models/migrations/v1_20/v246.go
new file mode 100644
index 0000000..e6340ef
--- /dev/null
+++ b/models/migrations/v1_20/v246.go
@@ -0,0 +1,16 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "xorm.io/xorm"
+)
+
+// AddNewColumnForProject adds the indexed owner_id column to the project
+// table via xorm Sync (which only adds missing columns/indexes).
+func AddNewColumnForProject(x *xorm.Engine) error {
+	type Project struct {
+		OwnerID int64 `xorm:"INDEX"`
+	}
+
+	return x.Sync(new(Project))
+}
diff --git a/models/migrations/v1_20/v247.go b/models/migrations/v1_20/v247.go
new file mode 100644
index 0000000..59fc5c4
--- /dev/null
+++ b/models/migrations/v1_20/v247.go
@@ -0,0 +1,50 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/xorm"
+)
+
+// FixIncorrectProjectType: set individual project's type from 3(TypeOrganization) to 1(TypeIndividual)
+func FixIncorrectProjectType(x *xorm.Engine) error {
+	type User struct {
+		ID   int64 `xorm:"pk autoincr"`
+		Type int
+	}
+
+	// Local copies of the enum values involved, to keep this migration
+	// independent of later changes to the real model packages.
+	const (
+		UserTypeIndividual int = 0
+
+		TypeIndividual   uint8 = 1
+		TypeOrganization uint8 = 3
+	)
+
+	type Project struct {
+		OwnerID int64 `xorm:"INDEX"`
+		Type    uint8
+		Owner   *User `xorm:"extends"`
+	}
+
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	// Only projects marked as organization-owned but whose owner is an
+	// individual user are rewritten; xorm's struct-based Update writes only
+	// the non-zero Type field.
+	count, err := sess.Table("project").
+		Where("type = ? AND owner_id IN (SELECT id FROM `user` WHERE type = ?)", TypeOrganization, UserTypeIndividual).
+		Update(&Project{
+			Type: TypeIndividual,
+		})
+	if err != nil {
+		return err
+	}
+	log.Debug("Updated %d projects to belong to a user instead of an organization", count)
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v248.go b/models/migrations/v1_20/v248.go
new file mode 100644
index 0000000..4055521
--- /dev/null
+++ b/models/migrations/v1_20/v248.go
@@ -0,0 +1,14 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import "xorm.io/xorm"
+
+// AddVersionToActionRunner adds the version column (act_runner version
+// string) to the action_runner table.
+func AddVersionToActionRunner(x *xorm.Engine) error {
+	type ActionRunner struct {
+		Version string `xorm:"VARCHAR(64)"` // the version of act_runner
+	}
+
+	return x.Sync(new(ActionRunner))
+}
diff --git a/models/migrations/v1_20/v249.go b/models/migrations/v1_20/v249.go
new file mode 100644
index 0000000..02951a7
--- /dev/null
+++ b/models/migrations/v1_20/v249.go
@@ -0,0 +1,45 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/xorm"
+ "xorm.io/xorm/schemas"
+)
+
+// Action is a trimmed-down copy of the activity-feed row, carrying only the
+// columns referenced by the index definitions in TableIndices below.
+type Action struct {
+	UserID      int64 // Receiver user id.
+	ActUserID   int64 // Action user id.
+	RepoID      int64
+	IsDeleted   bool               `xorm:"NOT NULL DEFAULT false"`
+	IsPrivate   bool               `xorm:"NOT NULL DEFAULT false"`
+	CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+// TableName sets the name of this table
+func (a *Action) TableName() string {
+	return "action"
+}
+
+// TableIndices implements xorm's TableIndices interface
+// It declares three composite indexes covering the common activity-feed
+// query shapes (by repo, by acting user, and by time).
+func (a *Action) TableIndices() []*schemas.Index {
+	repoIndex := schemas.NewIndex("r_u_d", schemas.IndexType)
+	repoIndex.AddColumn("repo_id", "user_id", "is_deleted")
+
+	actUserIndex := schemas.NewIndex("au_r_c_u_d", schemas.IndexType)
+	actUserIndex.AddColumn("act_user_id", "repo_id", "created_unix", "user_id", "is_deleted")
+
+	cudIndex := schemas.NewIndex("c_u_d", schemas.IndexType)
+	cudIndex.AddColumn("created_unix", "user_id", "is_deleted")
+
+	indices := []*schemas.Index{actUserIndex, repoIndex, cudIndex}
+
+	return indices
+}
+
+// ImproveActionTableIndices syncs the action table so the composite indexes
+// declared in Action.TableIndices are created.
+func ImproveActionTableIndices(x *xorm.Engine) error {
+	return x.Sync(new(Action))
+}
diff --git a/models/migrations/v1_20/v250.go b/models/migrations/v1_20/v250.go
new file mode 100644
index 0000000..86388ef
--- /dev/null
+++ b/models/migrations/v1_20/v250.go
@@ -0,0 +1,135 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/modules/json"
+
+ "xorm.io/xorm"
+)
+
+// ChangeContainerMetadataMultiArch rewrites stored container-package metadata
+// from the old "multiarch" map (platform -> digest) to the new "manifests"
+// list, computing the total blob size of each referenced manifest on the way.
+func ChangeContainerMetadataMultiArch(x *xorm.Engine) error {
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	type PackageVersion struct {
+		ID           int64  `xorm:"pk autoincr"`
+		MetadataJSON string `xorm:"metadata_json"`
+	}
+
+	// Empty marker type: used only to bind SumInt to the package_blob table.
+	type PackageBlob struct{}
+
+	// Get all relevant packages (manifest list images have a container.manifest.reference property)
+
+	var pvs []*PackageVersion
+	err := sess.
+		Table("package_version").
+		Select("id, metadata_json").
+		Where("id IN (SELECT DISTINCT ref_id FROM package_property WHERE ref_type = 0 AND name = 'container.manifest.reference')").
+		Find(&pvs)
+	if err != nil {
+		return err
+	}
+
+	// Old on-disk metadata shape; only MultiArch differs from the new shape.
+	type MetadataOld struct {
+		Type             string            `json:"type"`
+		IsTagged         bool              `json:"is_tagged"`
+		Platform         string            `json:"platform,omitempty"`
+		Description      string            `json:"description,omitempty"`
+		Authors          []string          `json:"authors,omitempty"`
+		Licenses         string            `json:"license,omitempty"`
+		ProjectURL       string            `json:"project_url,omitempty"`
+		RepositoryURL    string            `json:"repository_url,omitempty"`
+		DocumentationURL string            `json:"documentation_url,omitempty"`
+		Labels           map[string]string `json:"labels,omitempty"`
+		ImageLayers      []string          `json:"layer_creation,omitempty"`
+		MultiArch        map[string]string `json:"multiarch,omitempty"`
+	}
+
+	type Manifest struct {
+		Platform string `json:"platform"`
+		Digest   string `json:"digest"`
+		Size     int64  `json:"size"`
+	}
+
+	type MetadataNew struct {
+		Type             string            `json:"type"`
+		IsTagged         bool              `json:"is_tagged"`
+		Platform         string            `json:"platform,omitempty"`
+		Description      string            `json:"description,omitempty"`
+		Authors          []string          `json:"authors,omitempty"`
+		Licenses         string            `json:"license,omitempty"`
+		ProjectURL       string            `json:"project_url,omitempty"`
+		RepositoryURL    string            `json:"repository_url,omitempty"`
+		DocumentationURL string            `json:"documentation_url,omitempty"`
+		Labels           map[string]string `json:"labels,omitempty"`
+		ImageLayers      []string          `json:"layer_creation,omitempty"`
+		Manifests        []*Manifest       `json:"manifests,omitempty"`
+	}
+
+	for _, pv := range pvs {
+		var old *MetadataOld
+		if err := json.Unmarshal([]byte(pv.MetadataJSON), &old); err != nil {
+			return err
+		}
+
+		// Calculate the size of every contained manifest
+
+		manifests := make([]*Manifest, 0, len(old.MultiArch))
+		for platform, digest := range old.MultiArch {
+			// Sum the blob sizes of the version whose lower_version equals the
+			// digest, restricted to versions of the same package as pv.
+			size, err := sess.
+				Table("package_blob").
+				Join("INNER", "package_file", "package_blob.id = package_file.blob_id").
+				Join("INNER", "package_version pv", "pv.id = package_file.version_id").
+				Join("INNER", "package_version pv2", "pv2.package_id = pv.package_id").
+				Where("pv.lower_version = ? AND pv2.id = ?", strings.ToLower(digest), pv.ID).
+				SumInt(new(PackageBlob), "size")
+			if err != nil {
+				return err
+			}
+
+			manifests = append(manifests, &Manifest{
+				Platform: platform,
+				Digest:   digest,
+				Size:     size,
+			})
+		}
+
+		// Convert to new metadata format
+
+		newMetadata := &MetadataNew{
+			Type:             old.Type,
+			IsTagged:         old.IsTagged,
+			Platform:         old.Platform,
+			Description:      old.Description,
+			Authors:          old.Authors,
+			Licenses:         old.Licenses,
+			ProjectURL:       old.ProjectURL,
+			RepositoryURL:    old.RepositoryURL,
+			DocumentationURL: old.DocumentationURL,
+			Labels:           old.Labels,
+			ImageLayers:      old.ImageLayers,
+			Manifests:        manifests,
+		}
+
+		metadataJSON, err := json.Marshal(newMetadata)
+		if err != nil {
+			return err
+		}
+
+		pv.MetadataJSON = string(metadataJSON)
+
+		if _, err := sess.ID(pv.ID).Update(pv); err != nil {
+			return err
+		}
+	}
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v251.go b/models/migrations/v1_20/v251.go
new file mode 100644
index 0000000..7743248
--- /dev/null
+++ b/models/migrations/v1_20/v251.go
@@ -0,0 +1,47 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/xorm"
+)
+
+// FixIncorrectOwnerTeamUnitAccessMode sets access_mode to owner (4) on every
+// team_unit row belonging to a team whose authorize level is owner.
+func FixIncorrectOwnerTeamUnitAccessMode(x *xorm.Engine) error {
+	type UnitType int
+	type AccessMode int
+
+	type TeamUnit struct {
+		ID         int64    `xorm:"pk autoincr"`
+		OrgID      int64    `xorm:"INDEX"`
+		TeamID     int64    `xorm:"UNIQUE(s)"`
+		Type       UnitType `xorm:"UNIQUE(s)"`
+		AccessMode AccessMode
+	}
+
+	const (
+		// AccessModeOwner owner access
+		AccessModeOwner = 4
+	)
+
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	count, err := sess.Table("team_unit").
+		Where("team_id IN (SELECT id FROM team WHERE authorize = ?)", AccessModeOwner).
+		Update(&TeamUnit{
+			AccessMode: AccessModeOwner,
+		})
+	if err != nil {
+		return err
+	}
+	log.Debug("Updated %d owner team unit access mode to belong to owner instead of none", count)
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v252.go b/models/migrations/v1_20/v252.go
new file mode 100644
index 0000000..ab61cd9
--- /dev/null
+++ b/models/migrations/v1_20/v252.go
@@ -0,0 +1,47 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/xorm"
+)
+
+// FixIncorrectAdminTeamUnitAccessMode sets access_mode to admin (3) on every
+// team_unit row belonging to a team whose authorize level is admin.
+// Mirrors FixIncorrectOwnerTeamUnitAccessMode for the admin level.
+func FixIncorrectAdminTeamUnitAccessMode(x *xorm.Engine) error {
+	type UnitType int
+	type AccessMode int
+
+	type TeamUnit struct {
+		ID         int64    `xorm:"pk autoincr"`
+		OrgID      int64    `xorm:"INDEX"`
+		TeamID     int64    `xorm:"UNIQUE(s)"`
+		Type       UnitType `xorm:"UNIQUE(s)"`
+		AccessMode AccessMode
+	}
+
+	const (
+		// AccessModeAdmin admin access
+		AccessModeAdmin = 3
+	)
+
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	count, err := sess.Table("team_unit").
+		Where("team_id IN (SELECT id FROM team WHERE authorize = ?)", AccessModeAdmin).
+		Update(&TeamUnit{
+			AccessMode: AccessModeAdmin,
+		})
+	if err != nil {
+		return err
+	}
+	log.Debug("Updated %d admin team unit access mode to belong to admin instead of none", count)
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v253.go b/models/migrations/v1_20/v253.go
new file mode 100644
index 0000000..96c494b
--- /dev/null
+++ b/models/migrations/v1_20/v253.go
@@ -0,0 +1,49 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/xorm"
+)
+
+// FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam caps the
+// access mode of external-wiki (6) and external-tracker (7) team units at
+// read (1); these units only support read access.
+func FixExternalTrackerAndExternalWikiAccessModeInOwnerAndAdminTeam(x *xorm.Engine) error {
+	type UnitType int
+	type AccessMode int
+
+	type TeamUnit struct {
+		ID         int64    `xorm:"pk autoincr"`
+		Type       UnitType `xorm:"UNIQUE(s)"`
+		AccessMode AccessMode
+	}
+
+	const (
+		// AccessModeRead read access
+		AccessModeRead = 1
+
+		// Unit Type
+		TypeExternalWiki    = 6
+		TypeExternalTracker = 7
+	)
+
+	sess := x.NewSession()
+	defer sess.Close()
+
+	if err := sess.Begin(); err != nil {
+		return err
+	}
+
+	count, err := sess.Table("team_unit").
+		Where("type IN (?, ?) AND access_mode > ?", TypeExternalWiki, TypeExternalTracker, AccessModeRead).
+		Update(&TeamUnit{
+			AccessMode: AccessModeRead,
+		})
+	if err != nil {
+		return err
+	}
+	log.Debug("Updated %d ExternalTracker and ExternalWiki access mode to belong to owner and admin", count)
+
+	return sess.Commit()
+}
diff --git a/models/migrations/v1_20/v254.go b/models/migrations/v1_20/v254.go
new file mode 100644
index 0000000..1e26979
--- /dev/null
+++ b/models/migrations/v1_20/v254.go
@@ -0,0 +1,18 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "xorm.io/xorm"
+)
+
+// AddActionTaskOutputTable creates the action_task_output table, which stores
+// one output key/value pair per task with a composite unique on (task_id, output_key).
+func AddActionTaskOutputTable(x *xorm.Engine) error {
+	type ActionTaskOutput struct {
+		ID          int64
+		TaskID      int64  `xorm:"INDEX UNIQUE(task_id_output_key)"`
+		OutputKey   string `xorm:"VARCHAR(255) UNIQUE(task_id_output_key)"`
+		OutputValue string `xorm:"MEDIUMTEXT"`
+	}
+	return x.Sync(new(ActionTaskOutput))
+}
diff --git a/models/migrations/v1_20/v255.go b/models/migrations/v1_20/v255.go
new file mode 100644
index 0000000..14b70f8
--- /dev/null
+++ b/models/migrations/v1_20/v255.go
@@ -0,0 +1,23 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/xorm"
+)
+
+// AddArchivedUnixToRepository adds the archived_unix column and backfills it
+// for already-archived repositories from their updated_unix timestamp.
+func AddArchivedUnixToRepository(x *xorm.Engine) error {
+	type Repository struct {
+		ArchivedUnix timeutil.TimeStamp `xorm:"DEFAULT 0"`
+	}
+
+	if err := x.Sync(new(Repository)); err != nil {
+		return err
+	}
+
+	// Backfill only rows that still have the zero default.
+	_, err := x.Exec("UPDATE repository SET archived_unix = updated_unix WHERE is_archived = ? AND archived_unix = 0", true)
+	return err
+}
diff --git a/models/migrations/v1_20/v256.go b/models/migrations/v1_20/v256.go
new file mode 100644
index 0000000..822153b
--- /dev/null
+++ b/models/migrations/v1_20/v256.go
@@ -0,0 +1,23 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "xorm.io/xorm"
+)
+
+// AddIsInternalColumnToPackage adds the is_internal column to the package
+// table. The other fields restate the existing schema so Sync keeps the
+// composite unique index intact; only is_internal is new.
+func AddIsInternalColumnToPackage(x *xorm.Engine) error {
+	type Package struct {
+		ID               int64  `xorm:"pk autoincr"`
+		OwnerID          int64  `xorm:"UNIQUE(s) INDEX NOT NULL"`
+		RepoID           int64  `xorm:"INDEX"`
+		Type             string `xorm:"UNIQUE(s) INDEX NOT NULL"`
+		Name             string `xorm:"NOT NULL"`
+		LowerName        string `xorm:"UNIQUE(s) INDEX NOT NULL"`
+		SemverCompatible bool   `xorm:"NOT NULL DEFAULT false"`
+		IsInternal       bool   `xorm:"NOT NULL DEFAULT false"`
+	}
+
+	return x.Sync(new(Package))
+}
diff --git a/models/migrations/v1_20/v257.go b/models/migrations/v1_20/v257.go
new file mode 100644
index 0000000..6c6ca4c
--- /dev/null
+++ b/models/migrations/v1_20/v257.go
@@ -0,0 +1,33 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/xorm"
+)
+
+// CreateActionArtifactTable creates the action_artifact table; artifacts are
+// unique per (run_id, artifact_name) via the composite unique index.
+func CreateActionArtifactTable(x *xorm.Engine) error {
+	// ActionArtifact is a file that is stored in the artifact storage.
+	type ActionArtifact struct {
+		ID                 int64 `xorm:"pk autoincr"`
+		RunID              int64 `xorm:"index UNIQUE(runid_name)"` // The run id of the artifact
+		RunnerID           int64
+		RepoID             int64 `xorm:"index"`
+		OwnerID            int64
+		CommitSHA          string
+		StoragePath        string             // The path to the artifact in the storage
+		FileSize           int64              // The size of the artifact in bytes
+		FileCompressedSize int64              // The size of the artifact in bytes after gzip compression
+		ContentEncoding    string             // The content encoding of the artifact
+		ArtifactPath       string             // The path to the artifact when runner uploads it
+		ArtifactName       string             `xorm:"UNIQUE(runid_name)"` // The name of the artifact when runner uploads it
+		Status             int64              `xorm:"index"` // The status of the artifact
+		CreatedUnix        timeutil.TimeStamp `xorm:"created"`
+		UpdatedUnix        timeutil.TimeStamp `xorm:"updated index"`
+	}
+
+	return x.Sync(new(ActionArtifact))
+}
diff --git a/models/migrations/v1_20/v258.go b/models/migrations/v1_20/v258.go
new file mode 100644
index 0000000..47174ce
--- /dev/null
+++ b/models/migrations/v1_20/v258.go
@@ -0,0 +1,16 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "xorm.io/xorm"
+)
+
+// AddPinOrderToIssue adds the pin_order column (0 = not pinned) to the issue table.
+func AddPinOrderToIssue(x *xorm.Engine) error {
+	type Issue struct {
+		PinOrder int `xorm:"DEFAULT 0"`
+	}
+
+	return x.Sync(new(Issue))
+}
diff --git a/models/migrations/v1_20/v259.go b/models/migrations/v1_20/v259.go
new file mode 100644
index 0000000..5b8ced4
--- /dev/null
+++ b/models/migrations/v1_20/v259.go
@@ -0,0 +1,360 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+
+ "xorm.io/xorm"
+)
+
+// unknownAccessTokenScope represents the scope for an access token that isn't
+// known be an old token or a new token.
+type unknownAccessTokenScope string
+
+// AccessTokenScope represents the scope for an access token.
+type AccessTokenScope string
+
+// All scope strings of the new (1.19+) token-scope scheme.
+// for all categories, write implies read
+const (
+	AccessTokenScopeAll        AccessTokenScope = "all"
+	AccessTokenScopePublicOnly AccessTokenScope = "public-only" // limited to public orgs/repos
+
+	AccessTokenScopeReadActivityPub  AccessTokenScope = "read:activitypub"
+	AccessTokenScopeWriteActivityPub AccessTokenScope = "write:activitypub"
+
+	AccessTokenScopeReadAdmin  AccessTokenScope = "read:admin"
+	AccessTokenScopeWriteAdmin AccessTokenScope = "write:admin"
+
+	AccessTokenScopeReadMisc  AccessTokenScope = "read:misc"
+	AccessTokenScopeWriteMisc AccessTokenScope = "write:misc"
+
+	AccessTokenScopeReadNotification  AccessTokenScope = "read:notification"
+	AccessTokenScopeWriteNotification AccessTokenScope = "write:notification"
+
+	AccessTokenScopeReadOrganization  AccessTokenScope = "read:organization"
+	AccessTokenScopeWriteOrganization AccessTokenScope = "write:organization"
+
+	AccessTokenScopeReadPackage  AccessTokenScope = "read:package"
+	AccessTokenScopeWritePackage AccessTokenScope = "write:package"
+
+	AccessTokenScopeReadIssue  AccessTokenScope = "read:issue"
+	AccessTokenScopeWriteIssue AccessTokenScope = "write:issue"
+
+	AccessTokenScopeReadRepository  AccessTokenScope = "read:repository"
+	AccessTokenScopeWriteRepository AccessTokenScope = "write:repository"
+
+	AccessTokenScopeReadUser  AccessTokenScope = "read:user"
+	AccessTokenScopeWriteUser AccessTokenScope = "write:user"
+)
+
+// accessTokenScopeBitmap represents a bitmap of access token scopes.
+type accessTokenScopeBitmap uint64
+
+// Bitmap of each scope, including the child scopes.
+// Each write bit ORs in its read bit, so write implies read by construction.
+const (
+	// AccessTokenScopeAllBits is the bitmap of all access token scopes
+	accessTokenScopeAllBits accessTokenScopeBitmap = accessTokenScopeWriteActivityPubBits |
+		accessTokenScopeWriteAdminBits | accessTokenScopeWriteMiscBits | accessTokenScopeWriteNotificationBits |
+		accessTokenScopeWriteOrganizationBits | accessTokenScopeWritePackageBits | accessTokenScopeWriteIssueBits |
+		accessTokenScopeWriteRepositoryBits | accessTokenScopeWriteUserBits
+
+	// Note: iota is already 1 here because the allBits spec above occupies
+	// index 0 of this const block, so bit value 1 is never assigned.
+	accessTokenScopePublicOnlyBits accessTokenScopeBitmap = 1 << iota
+
+	accessTokenScopeReadActivityPubBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteActivityPubBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadActivityPubBits
+
+	accessTokenScopeReadAdminBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteAdminBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadAdminBits
+
+	accessTokenScopeReadMiscBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteMiscBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadMiscBits
+
+	accessTokenScopeReadNotificationBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteNotificationBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadNotificationBits
+
+	accessTokenScopeReadOrganizationBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteOrganizationBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadOrganizationBits
+
+	accessTokenScopeReadPackageBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWritePackageBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadPackageBits
+
+	accessTokenScopeReadIssueBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteIssueBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadIssueBits
+
+	accessTokenScopeReadRepositoryBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteRepositoryBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadRepositoryBits
+
+	accessTokenScopeReadUserBits  accessTokenScopeBitmap = 1 << iota
+	accessTokenScopeWriteUserBits accessTokenScopeBitmap = 1<<iota | accessTokenScopeReadUserBits
+
+	// The current implementation only supports up to 64 token scopes.
+	// If we need to support > 64 scopes,
+	// refactoring the whole implementation in this file (and only this file) is needed.
+)
+
+// allAccessTokenScopes contains all access token scopes.
+// The order is important: parent scope must precede child scopes.
+// toScope iterates this slice in order to emit a stable, normalized string.
+var allAccessTokenScopes = []AccessTokenScope{
+	AccessTokenScopePublicOnly,
+	AccessTokenScopeWriteActivityPub, AccessTokenScopeReadActivityPub,
+	AccessTokenScopeWriteAdmin, AccessTokenScopeReadAdmin,
+	AccessTokenScopeWriteMisc, AccessTokenScopeReadMisc,
+	AccessTokenScopeWriteNotification, AccessTokenScopeReadNotification,
+	AccessTokenScopeWriteOrganization, AccessTokenScopeReadOrganization,
+	AccessTokenScopeWritePackage, AccessTokenScopeReadPackage,
+	AccessTokenScopeWriteIssue, AccessTokenScopeReadIssue,
+	AccessTokenScopeWriteRepository, AccessTokenScopeReadRepository,
+	AccessTokenScopeWriteUser, AccessTokenScopeReadUser,
+}
+
+// allAccessTokenScopeBits contains all access token scopes.
+// It maps every scope string to its bitmap representation.
+var allAccessTokenScopeBits = map[AccessTokenScope]accessTokenScopeBitmap{
+	AccessTokenScopeAll:               accessTokenScopeAllBits,
+	AccessTokenScopePublicOnly:        accessTokenScopePublicOnlyBits,
+	AccessTokenScopeReadActivityPub:   accessTokenScopeReadActivityPubBits,
+	AccessTokenScopeWriteActivityPub:  accessTokenScopeWriteActivityPubBits,
+	AccessTokenScopeReadAdmin:         accessTokenScopeReadAdminBits,
+	AccessTokenScopeWriteAdmin:        accessTokenScopeWriteAdminBits,
+	AccessTokenScopeReadMisc:          accessTokenScopeReadMiscBits,
+	AccessTokenScopeWriteMisc:         accessTokenScopeWriteMiscBits,
+	AccessTokenScopeReadNotification:  accessTokenScopeReadNotificationBits,
+	AccessTokenScopeWriteNotification: accessTokenScopeWriteNotificationBits,
+	AccessTokenScopeReadOrganization:  accessTokenScopeReadOrganizationBits,
+	AccessTokenScopeWriteOrganization: accessTokenScopeWriteOrganizationBits,
+	AccessTokenScopeReadPackage:       accessTokenScopeReadPackageBits,
+	AccessTokenScopeWritePackage:      accessTokenScopeWritePackageBits,
+	AccessTokenScopeReadIssue:         accessTokenScopeReadIssueBits,
+	AccessTokenScopeWriteIssue:        accessTokenScopeWriteIssueBits,
+	AccessTokenScopeReadRepository:    accessTokenScopeReadRepositoryBits,
+	AccessTokenScopeWriteRepository:   accessTokenScopeWriteRepositoryBits,
+	AccessTokenScopeReadUser:          accessTokenScopeReadUserBits,
+	AccessTokenScopeWriteUser:         accessTokenScopeWriteUserBits,
+}
+
+// hasScope returns true if the string has the given scope
+// A scope is contained when every one of its bits is set in the bitmap.
+func (bitmap accessTokenScopeBitmap) hasScope(scope AccessTokenScope) (bool, error) {
+	expectedBits, ok := allAccessTokenScopeBits[scope]
+	if !ok {
+		return false, fmt.Errorf("invalid access token scope: %s", scope)
+	}
+
+	return bitmap&expectedBits == expectedBits, nil
+}
+
+// toScope returns a normalized scope string without any duplicates.
+// Unknown scopes (if any) are emitted first, followed by known scopes in the
+// canonical order of allAccessTokenScopes; redundant child scopes are dropped.
+func (bitmap accessTokenScopeBitmap) toScope(unknownScopes *[]unknownAccessTokenScope) AccessTokenScope {
+	var scopes []string
+
+	// Preserve unknown scopes, and put them at the beginning so that it's clear
+	// when debugging.
+	if unknownScopes != nil {
+		for _, unknownScope := range *unknownScopes {
+			scopes = append(scopes, string(unknownScope))
+		}
+	}
+
+	// iterate over all scopes, and reconstruct the bitmap
+	// if the reconstructed bitmap doesn't change, then the scope is already included
+	var reconstruct accessTokenScopeBitmap
+
+	for _, singleScope := range allAccessTokenScopes {
+		// no need for error checking here, since we know the scope is valid
+		if ok, _ := bitmap.hasScope(singleScope); ok {
+			current := reconstruct | allAccessTokenScopeBits[singleScope]
+			if current == reconstruct {
+				continue
+			}
+
+			reconstruct = current
+			scopes = append(scopes, string(singleScope))
+		}
+	}
+
+	// If every write scope is present, collapse the full list into "all".
+	// The replaced string matches the canonical ordering produced above.
+	scope := AccessTokenScope(strings.Join(scopes, ","))
+	scope = AccessTokenScope(strings.ReplaceAll(
+		string(scope),
+		"write:activitypub,write:admin,write:misc,write:notification,write:organization,write:package,write:issue,write:repository,write:user",
+		"all",
+	))
+	return scope
+}
+
+// parse the scope string into a bitmap, thus removing possible duplicates.
+// It also collects scope names that are not in allAccessTokenScopeBits so the
+// caller can preserve them verbatim.
+func (s AccessTokenScope) parse() (accessTokenScopeBitmap, *[]unknownAccessTokenScope) {
+	var bitmap accessTokenScopeBitmap
+	var unknownScopes []unknownAccessTokenScope
+
+	// The following is the more performant equivalent of 'for _, v := range strings.Split(remainingScope, ",")' as this is hot code
+	remainingScopes := string(s)
+	for len(remainingScopes) > 0 {
+		i := strings.IndexByte(remainingScopes, ',')
+		var v string
+		if i < 0 {
+			v = remainingScopes
+			remainingScopes = ""
+		} else if i+1 >= len(remainingScopes) {
+			v = remainingScopes[:i]
+			remainingScopes = ""
+		} else {
+			v = remainingScopes[:i]
+			remainingScopes = remainingScopes[i+1:]
+		}
+		singleScope := AccessTokenScope(v)
+		if singleScope == "" {
+			continue
+		}
+		if singleScope == AccessTokenScopeAll {
+			bitmap |= accessTokenScopeAllBits
+			continue
+		}
+
+		// Unknown scopes contribute zero bits (bits stays 0 when !ok), so the
+		// OR below is a no-op for them; they are reported to the caller instead.
+		bits, ok := allAccessTokenScopeBits[singleScope]
+		if !ok {
+			unknownScopes = append(unknownScopes, unknownAccessTokenScope(string(singleScope)))
+		}
+		bitmap |= bits
+	}
+
+	return bitmap, &unknownScopes
+}
+
+// NormalizePreservingUnknown returns a normalized scope string without any
+// duplicates. Unknown scopes are included.
+// It is the round-trip of parse followed by toScope.
+func (s AccessTokenScope) NormalizePreservingUnknown() AccessTokenScope {
+	bitmap, unknownScopes := s.parse()
+
+	return bitmap.toScope(unknownScopes)
+}
+
+// OldAccessTokenScope represents the scope for an access token.
+type OldAccessTokenScope string
+
+// Scope strings of the pre-1.19 (GitHub-style) token-scope scheme, listed so
+// ConvertScopedAccessTokens can map each one to the new scheme.
+const (
+	OldAccessTokenScopeAll OldAccessTokenScope = "all"
+
+	OldAccessTokenScopeRepo       OldAccessTokenScope = "repo"
+	OldAccessTokenScopeRepoStatus OldAccessTokenScope = "repo:status"
+	OldAccessTokenScopePublicRepo OldAccessTokenScope = "public_repo"
+
+	OldAccessTokenScopeAdminOrg OldAccessTokenScope = "admin:org"
+	OldAccessTokenScopeWriteOrg OldAccessTokenScope = "write:org"
+	OldAccessTokenScopeReadOrg  OldAccessTokenScope = "read:org"
+
+	OldAccessTokenScopeAdminPublicKey OldAccessTokenScope = "admin:public_key"
+	OldAccessTokenScopeWritePublicKey OldAccessTokenScope = "write:public_key"
+	OldAccessTokenScopeReadPublicKey  OldAccessTokenScope = "read:public_key"
+
+	OldAccessTokenScopeAdminRepoHook OldAccessTokenScope = "admin:repo_hook"
+	OldAccessTokenScopeWriteRepoHook OldAccessTokenScope = "write:repo_hook"
+	OldAccessTokenScopeReadRepoHook  OldAccessTokenScope = "read:repo_hook"
+
+	OldAccessTokenScopeAdminOrgHook OldAccessTokenScope = "admin:org_hook"
+
+	OldAccessTokenScopeNotification OldAccessTokenScope = "notification"
+
+	OldAccessTokenScopeUser       OldAccessTokenScope = "user"
+	OldAccessTokenScopeReadUser   OldAccessTokenScope = "read:user"
+	OldAccessTokenScopeUserEmail  OldAccessTokenScope = "user:email"
+	OldAccessTokenScopeUserFollow OldAccessTokenScope = "user:follow"
+
+	OldAccessTokenScopeDeleteRepo OldAccessTokenScope = "delete_repo"
+
+	OldAccessTokenScopePackage       OldAccessTokenScope = "package"
+	OldAccessTokenScopeWritePackage  OldAccessTokenScope = "write:package"
+	OldAccessTokenScopeReadPackage   OldAccessTokenScope = "read:package"
+	OldAccessTokenScopeDeletePackage OldAccessTokenScope = "delete:package"
+
+	OldAccessTokenScopeAdminGPGKey OldAccessTokenScope = "admin:gpg_key"
+	OldAccessTokenScopeWriteGPGKey OldAccessTokenScope = "write:gpg_key"
+	OldAccessTokenScopeReadGPGKey  OldAccessTokenScope = "read:gpg_key"
+
+	OldAccessTokenScopeAdminApplication OldAccessTokenScope = "admin:application"
+	OldAccessTokenScopeWriteApplication OldAccessTokenScope = "write:application"
+	OldAccessTokenScopeReadApplication  OldAccessTokenScope = "read:application"
+
+	OldAccessTokenScopeSudo OldAccessTokenScope = "sudo"
+)
+
+// accessTokenScopeMap maps each old scope to the new scope(s) that replace it.
+// Old admin/write variants collapse to the new write scope, read variants to
+// the new read scope.
+var accessTokenScopeMap = map[OldAccessTokenScope][]AccessTokenScope{
+	OldAccessTokenScopeAll:              {AccessTokenScopeAll},
+	OldAccessTokenScopeRepo:             {AccessTokenScopeWriteRepository},
+	OldAccessTokenScopeRepoStatus:       {AccessTokenScopeWriteRepository},
+	OldAccessTokenScopePublicRepo:       {AccessTokenScopePublicOnly, AccessTokenScopeWriteRepository},
+	OldAccessTokenScopeAdminOrg:         {AccessTokenScopeWriteOrganization},
+	OldAccessTokenScopeWriteOrg:         {AccessTokenScopeWriteOrganization},
+	OldAccessTokenScopeReadOrg:          {AccessTokenScopeReadOrganization},
+	OldAccessTokenScopeAdminPublicKey:   {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeWritePublicKey:   {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeReadPublicKey:    {AccessTokenScopeReadUser},
+	OldAccessTokenScopeAdminRepoHook:    {AccessTokenScopeWriteRepository},
+	OldAccessTokenScopeWriteRepoHook:    {AccessTokenScopeWriteRepository},
+	OldAccessTokenScopeReadRepoHook:     {AccessTokenScopeReadRepository},
+	OldAccessTokenScopeAdminOrgHook:     {AccessTokenScopeWriteOrganization},
+	OldAccessTokenScopeNotification:     {AccessTokenScopeWriteNotification},
+	OldAccessTokenScopeUser:             {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeReadUser:         {AccessTokenScopeReadUser},
+	OldAccessTokenScopeUserEmail:        {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeUserFollow:       {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeDeleteRepo:       {AccessTokenScopeWriteRepository},
+	OldAccessTokenScopePackage:          {AccessTokenScopeWritePackage},
+	OldAccessTokenScopeWritePackage:     {AccessTokenScopeWritePackage},
+	OldAccessTokenScopeReadPackage:      {AccessTokenScopeReadPackage},
+	OldAccessTokenScopeDeletePackage:    {AccessTokenScopeWritePackage},
+	OldAccessTokenScopeAdminGPGKey:      {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeWriteGPGKey:      {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeReadGPGKey:       {AccessTokenScopeReadUser},
+	OldAccessTokenScopeAdminApplication: {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeWriteApplication: {AccessTokenScopeWriteUser},
+	OldAccessTokenScopeReadApplication:  {AccessTokenScopeReadUser},
+	OldAccessTokenScopeSudo:             {AccessTokenScopeWriteAdmin},
+}
+
+// AccessToken is the minimal projection of the access_token table needed by
+// ConvertScopedAccessTokens: the primary key and the scope string column.
+type AccessToken struct {
+	ID    int64 `xorm:"pk autoincr"`
+	Scope string
+}
+
+// ConvertScopedAccessTokens rewrites every access token's scope string from
+// the old scheme to the new one. Scopes that are not recognized as old scopes
+// are preserved verbatim.
+func ConvertScopedAccessTokens(x *xorm.Engine) error {
+	var tokens []*AccessToken
+
+	if err := x.Find(&tokens); err != nil {
+		return err
+	}
+
+	for _, token := range tokens {
+		var scopes []string
+		// Use a set to deduplicate, since several old scopes map to the same
+		// new scope (e.g. repo and repo:status -> write:repository).
+		allNewScopesMap := make(map[AccessTokenScope]bool)
+		for _, oldScope := range strings.Split(token.Scope, ",") {
+			if newScopes, exists := accessTokenScopeMap[OldAccessTokenScope(oldScope)]; exists {
+				for _, newScope := range newScopes {
+					allNewScopesMap[newScope] = true
+				}
+			} else {
+				log.Debug("access token scope not recognized as old token scope %s; preserving it", oldScope)
+				scopes = append(scopes, oldScope)
+			}
+		}
+
+		// Map iteration order is random, but NormalizePreservingUnknown below
+		// re-emits known scopes in canonical order, so this is deterministic.
+		for s := range allNewScopesMap {
+			scopes = append(scopes, string(s))
+		}
+		scope := AccessTokenScope(strings.Join(scopes, ","))
+
+		// normalize the scope
+		normScope := scope.NormalizePreservingUnknown()
+
+		token.Scope = string(normScope)
+
+		// update the db entry with the new scope
+		if _, err := x.Cols("scope").ID(token.ID).Update(token); err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
diff --git a/models/migrations/v1_20/v259_test.go b/models/migrations/v1_20/v259_test.go
new file mode 100644
index 0000000..ae219ea
--- /dev/null
+++ b/models/migrations/v1_20/v259_test.go
@@ -0,0 +1,111 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package v1_20 //nolint
+
+import (
+ "sort"
+ "strings"
+ "testing"
+
+ migration_tests "code.gitea.io/gitea/models/migrations/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// testCase pairs an old-format scope string with the new-format scope
+// string the migration is expected to produce for it.
+type testCase struct {
+ Old OldAccessTokenScope
+ New AccessTokenScope
+}
+
+// createOldTokenScope joins the given old-style scopes into one
+// comma-separated OldAccessTokenScope, mirroring how a token's scope is
+// stored as a single string in the database.
+func createOldTokenScope(scopes ...OldAccessTokenScope) OldAccessTokenScope {
+ s := make([]string, 0, len(scopes))
+ for _, os := range scopes {
+ s = append(s, string(os))
+ }
+ return OldAccessTokenScope(strings.Join(s, ","))
+}
+
+// createNewTokenScope joins the given new-style scopes into one
+// comma-separated AccessTokenScope, the counterpart of createOldTokenScope
+// for building expected post-migration values.
+func createNewTokenScope(scopes ...AccessTokenScope) AccessTokenScope {
+ s := make([]string, 0, len(scopes))
+ for _, os := range scopes {
+ s = append(s, string(os))
+ }
+ return AccessTokenScope(strings.Join(s, ","))
+}
+
+// Test_ConvertScopedAccessTokens inserts tokens with old-format (and some
+// invalid) scopes, runs the migration twice, and checks each resulting
+// scope string against the expected new-format value. Covered cases:
+// combined scopes, the empty scope, the all-encompassing old scope,
+// preservation of unrecognized scopes, every individual map entry, and
+// idempotency of the migration.
+func Test_ConvertScopedAccessTokens(t *testing.T) {
+ tests := []testCase{
+ {
+ createOldTokenScope(OldAccessTokenScopeRepo, OldAccessTokenScopeUserFollow),
+ createNewTokenScope(AccessTokenScopeWriteRepository, AccessTokenScopeWriteUser),
+ },
+ {
+ createOldTokenScope(OldAccessTokenScopeUser, OldAccessTokenScopeWritePackage, OldAccessTokenScopeSudo),
+ createNewTokenScope(AccessTokenScopeWriteAdmin, AccessTokenScopeWritePackage, AccessTokenScopeWriteUser),
+ },
+ {
+ // an empty scope must survive the migration unchanged
+ createOldTokenScope(),
+ createNewTokenScope(),
+ },
+ {
+ // "all" subsumes the other scopes after conversion
+ createOldTokenScope(OldAccessTokenScopeReadGPGKey, OldAccessTokenScopeReadOrg, OldAccessTokenScopeAll),
+ createNewTokenScope(AccessTokenScopeAll),
+ },
+ {
+ // unrecognized scopes are preserved verbatim, not dropped
+ createOldTokenScope(OldAccessTokenScopeReadGPGKey, "invalid"),
+ createNewTokenScope("invalid", AccessTokenScopeReadUser),
+ },
+ }
+
+ // add a test for each individual mapping
+ for oldScope, newScope := range accessTokenScopeMap {
+ tests = append(tests, testCase{
+ oldScope,
+ createNewTokenScope(newScope...),
+ })
+ }
+
+ x, deferable := migration_tests.PrepareTestEnv(t, 0, new(AccessToken))
+ defer deferable()
+ if x == nil || t.Failed() {
+ t.Skip()
+ return
+ }
+
+ // verify that no fixtures were loaded
+ count, err := x.Count(&AccessToken{})
+ require.NoError(t, err)
+ assert.Equal(t, int64(0), count)
+
+ // seed one row per test case; auto-increment IDs record insertion order
+ for _, tc := range tests {
+ _, err = x.Insert(&AccessToken{
+ Scope: string(tc.Old),
+ })
+ require.NoError(t, err)
+ }
+
+ // migrate the scopes
+ err = ConvertScopedAccessTokens(x)
+ require.NoError(t, err)
+
+ // migrate the scopes again (migration should be idempotent)
+ err = ConvertScopedAccessTokens(x)
+ require.NoError(t, err)
+
+ tokens := make([]AccessToken, 0)
+ err = x.Find(&tokens)
+ require.NoError(t, err)
+ assert.Equal(t, len(tests), len(tokens))
+
+ // sort the tokens (insertion order by auto-incrementing primary key)
+ // so tokens[idx] lines up with tests[idx]
+ sort.Slice(tokens, func(i, j int) bool {
+ return tokens[i].ID < tokens[j].ID
+ })
+
+ // verify that the converted scopes are equal to the expected test result
+ for idx, newToken := range tokens {
+ assert.Equal(t, string(tests[idx].New), newToken.Scope)
+ }
+}