path: root/models/issues

Diffstat
-rw-r--r--  models/issues/assignees.go  177
-rw-r--r--  models/issues/assignees_test.go  95
-rw-r--r--  models/issues/comment.go  1333
-rw-r--r--  models/issues/comment_code.go  181
-rw-r--r--  models/issues/comment_list.go  488
-rw-r--r--  models/issues/comment_list_test.go  86
-rw-r--r--  models/issues/comment_test.go  127
-rw-r--r--  models/issues/content_history.go  242
-rw-r--r--  models/issues/content_history_test.go  94
-rw-r--r--  models/issues/dependency.go  222
-rw-r--r--  models/issues/dependency_test.go  63
-rw-r--r--  models/issues/issue.go  939
-rw-r--r--  models/issues/issue_index.go  39
-rw-r--r--  models/issues/issue_index_test.go  39
-rw-r--r--  models/issues/issue_label.go  505
-rw-r--r--  models/issues/issue_label_test.go  30
-rw-r--r--  models/issues/issue_list.go  622
-rw-r--r--  models/issues/issue_list_test.go  129
-rw-r--r--  models/issues/issue_lock.go  66
-rw-r--r--  models/issues/issue_project.go  162
-rw-r--r--  models/issues/issue_search.go  489
-rw-r--r--  models/issues/issue_stats.go  191
-rw-r--r--  models/issues/issue_stats_test.go  34
-rw-r--r--  models/issues/issue_test.go  498
-rw-r--r--  models/issues/issue_update.go  795
-rw-r--r--  models/issues/issue_user.go  96
-rw-r--r--  models/issues/issue_user_test.go  61
-rw-r--r--  models/issues/issue_watch.go  134
-rw-r--r--  models/issues/issue_watch_test.go  68
-rw-r--r--  models/issues/issue_xref.go  364
-rw-r--r--  models/issues/issue_xref_test.go  185
-rw-r--r--  models/issues/label.go  509
-rw-r--r--  models/issues/label_test.go  422
-rw-r--r--  models/issues/main_test.go  33
-rw-r--r--  models/issues/milestone.go  394
-rw-r--r--  models/issues/milestone_list.go  195
-rw-r--r--  models/issues/milestone_test.go  371
-rw-r--r--  models/issues/pull.go  1105
-rw-r--r--  models/issues/pull_list.go  264
-rw-r--r--  models/issues/pull_test.go  476
-rw-r--r--  models/issues/reaction.go  373
-rw-r--r--  models/issues/reaction_test.go  178
-rw-r--r--  models/issues/review.go  1056
-rw-r--r--  models/issues/review_list.go  200
-rw-r--r--  models/issues/review_test.go  321
-rw-r--r--  models/issues/stopwatch.go  296
-rw-r--r--  models/issues/stopwatch_test.go  79
-rw-r--r--  models/issues/tracked_time.go  386
-rw-r--r--  models/issues/tracked_time_test.go  135
49 files changed, 15347 insertions, 0 deletions
diff --git a/models/issues/assignees.go b/models/issues/assignees.go
new file mode 100644
index 0000000..a83cb25
--- /dev/null
+++ b/models/issues/assignees.go
@@ -0,0 +1,177 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// IssueAssignees saves all issue assignees
+type IssueAssignees struct {
+ ID int64 `xorm:"pk autoincr"`
+ AssigneeID int64 `xorm:"INDEX"`
+ IssueID int64 `xorm:"INDEX"`
+}
+
+func init() {
+ db.RegisterModel(new(IssueAssignees))
+}
+
+// LoadAssignees loads the assignees of this issue.
+func (issue *Issue) LoadAssignees(ctx context.Context) (err error) {
+ if issue.isAssigneeLoaded || len(issue.Assignees) > 0 {
+ return nil
+ }
+
+ // Reset any preexisting assignees
+ issue.Assignees = []*user_model.User{}
+ issue.Assignee = nil
+
+ if err = db.GetEngine(ctx).Table("`user`").
+ Join("INNER", "issue_assignees", "assignee_id = `user`.id").
+ Where("issue_assignees.issue_id = ?", issue.ID).
+ Find(&issue.Assignees); err != nil {
+ return err
+ }
+
+ issue.isAssigneeLoaded = true
+ // Check if we have at least one assignee and if yes put it in as `Assignee`
+ if len(issue.Assignees) > 0 {
+ issue.Assignee = issue.Assignees[0]
+ }
+ return nil
+}
+
+// GetAssigneeIDsByIssue returns the IDs of users assigned to an issue
+// but skips joining with `user` for performance reasons.
+// User permissions must be verified elsewhere if required.
+func GetAssigneeIDsByIssue(ctx context.Context, issueID int64) ([]int64, error) {
+ userIDs := make([]int64, 0, 5)
+ return userIDs, db.GetEngine(ctx).
+ Table("issue_assignees").
+ Cols("assignee_id").
+ Where("issue_id = ?", issueID).
+ Distinct("assignee_id").
+ Find(&userIDs)
+}
+
+// IsUserAssignedToIssue returns true when the user is assigned to the issue
+func IsUserAssignedToIssue(ctx context.Context, issue *Issue, user *user_model.User) (isAssigned bool, err error) {
+ return db.Exist[IssueAssignees](ctx, builder.Eq{"assignee_id": user.ID, "issue_id": issue.ID})
+}
+
+// ToggleIssueAssignee changes a user between assigned and not assigned for this issue, and creates an issue comment for it.
+func ToggleIssueAssignee(ctx context.Context, issue *Issue, doer *user_model.User, assigneeID int64) (removed bool, comment *Comment, err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return false, nil, err
+ }
+ defer committer.Close()
+
+ removed, comment, err = toggleIssueAssignee(ctx, issue, doer, assigneeID, false)
+ if err != nil {
+ return false, nil, err
+ }
+
+ if err := committer.Commit(); err != nil {
+ return false, nil, err
+ }
+
+ return removed, comment, nil
+}
+
+func toggleIssueAssignee(ctx context.Context, issue *Issue, doer *user_model.User, assigneeID int64, isCreate bool) (removed bool, comment *Comment, err error) {
+ removed, err = toggleUserAssignee(ctx, issue, assigneeID)
+ if err != nil {
+ return false, nil, fmt.Errorf("UpdateIssueUserByAssignee: %w", err)
+ }
+
+ // Repo infos
+ if err = issue.LoadRepo(ctx); err != nil {
+ return false, nil, fmt.Errorf("loadRepo: %w", err)
+ }
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeAssignees,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: removed,
+ AssigneeID: assigneeID,
+ }
+ // Comment
+ comment, err = CreateComment(ctx, opts)
+ if err != nil {
+ return false, nil, fmt.Errorf("createComment: %w", err)
+ }
+
+ // if pull request is in the middle of creation - don't call webhook
+ if isCreate {
+ return removed, comment, err
+ }
+
+ return removed, comment, nil
+}
+
+// toggles user assignee state in database
+func toggleUserAssignee(ctx context.Context, issue *Issue, assigneeID int64) (removed bool, err error) {
+ // Check if the user exists
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ return false, err
+ }
+
+ // Check if the submitted user is already assigned; if so, remove the assignment, otherwise add it
+ found := false
+ i := 0
+ for ; i < len(issue.Assignees); i++ {
+ if issue.Assignees[i].ID == assigneeID {
+ found = true
+ break
+ }
+ }
+
+ assigneeIn := IssueAssignees{AssigneeID: assigneeID, IssueID: issue.ID}
+ if found {
+ issue.Assignees = append(issue.Assignees[:i], issue.Assignees[i+1:]...)
+ _, err = db.DeleteByBean(ctx, &assigneeIn)
+ if err != nil {
+ return found, err
+ }
+ } else {
+ issue.Assignees = append(issue.Assignees, assignee)
+ if err = db.Insert(ctx, &assigneeIn); err != nil {
+ return found, err
+ }
+ }
+
+ return found, nil
+}
+
+// MakeIDsFromAPIAssigneesToAdd returns a slice with the IDs of all assignees to add
+func MakeIDsFromAPIAssigneesToAdd(ctx context.Context, oneAssignee string, multipleAssignees []string) (assigneeIDs []int64, err error) {
+ var requestAssignees []string
+
+ // Keeping the old assigning method for compatibility reasons
+ if oneAssignee != "" && !util.SliceContainsString(multipleAssignees, oneAssignee) {
+ requestAssignees = append(requestAssignees, oneAssignee)
+ }
+
+ // Prevent empty assignees
+ if len(multipleAssignees) > 0 && multipleAssignees[0] != "" {
+ requestAssignees = append(requestAssignees, multipleAssignees...)
+ }
+
+ // Get the IDs of all assignees
+ assigneeIDs, err = user_model.GetUserIDsByNames(ctx, requestAssignees, false)
+
+ return assigneeIDs, err
+}
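The file above only defines the model-layer helpers; how a caller combines them is not part of this diff. As a minimal illustrative sketch (not part of the commit; the caller package and function name are hypothetical), the two exported entry points could be wired together to resolve assignee names from an API request and toggle each resulting user:

package example // hypothetical caller package, for illustration only

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	user_model "code.gitea.io/gitea/models/user"
)

// toggleRequestedAssignees resolves the assignee names coming from an API
// request and toggles each resulting user on the issue. Every call to
// ToggleIssueAssignee runs in its own transaction and records a
// CommentTypeAssignees comment.
func toggleRequestedAssignees(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, oneAssignee string, multipleAssignees []string) error {
	ids, err := issues_model.MakeIDsFromAPIAssigneesToAdd(ctx, oneAssignee, multipleAssignees)
	if err != nil {
		return err
	}
	for _, id := range ids {
		if _, _, err := issues_model.ToggleIssueAssignee(ctx, issue, doer, id); err != nil {
			return err
		}
	}
	return nil
}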
diff --git a/models/issues/assignees_test.go b/models/issues/assignees_test.go
new file mode 100644
index 0000000..47fb81a
--- /dev/null
+++ b/models/issues/assignees_test.go
@@ -0,0 +1,95 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestUpdateAssignee(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Fake issue with assignees
+ issue, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ err = issue.LoadAttributes(db.DefaultContext)
+ require.NoError(t, err)
+
+ // Assign multiple users
+ user2, err := user_model.GetUserByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+ _, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, user2.ID)
+ require.NoError(t, err)
+
+ org3, err := user_model.GetUserByID(db.DefaultContext, 3)
+ require.NoError(t, err)
+ _, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, org3.ID)
+ require.NoError(t, err)
+
+ user1, err := user_model.GetUserByID(db.DefaultContext, 1) // This user is already assigned (see the definition in fixtures), so running UpdateAssignee should unassign him
+ require.NoError(t, err)
+ _, _, err = issues_model.ToggleIssueAssignee(db.DefaultContext, issue, &user_model.User{ID: 1}, user1.ID)
+ require.NoError(t, err)
+
+ // Check if he got removed
+ isAssigned, err := issues_model.IsUserAssignedToIssue(db.DefaultContext, issue, user1)
+ require.NoError(t, err)
+ assert.False(t, isAssigned)
+
+ // Check if they're all there
+ err = issue.LoadAssignees(db.DefaultContext)
+ require.NoError(t, err)
+
+ var expectedAssignees []*user_model.User
+ expectedAssignees = append(expectedAssignees, user2, org3)
+
+ for in, assignee := range issue.Assignees {
+ assert.Equal(t, assignee.ID, expectedAssignees[in].ID)
+ }
+
+ // Check if the user is assigned
+ isAssigned, err = issues_model.IsUserAssignedToIssue(db.DefaultContext, issue, user2)
+ require.NoError(t, err)
+ assert.True(t, isAssigned)
+
+ // This user should not be assigned
+ isAssigned, err = issues_model.IsUserAssignedToIssue(db.DefaultContext, issue, &user_model.User{ID: 4})
+ require.NoError(t, err)
+ assert.False(t, isAssigned)
+}
+
+func TestMakeIDsFromAPIAssigneesToAdd(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ _ = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ _ = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ IDs, err := issues_model.MakeIDsFromAPIAssigneesToAdd(db.DefaultContext, "", []string{""})
+ require.NoError(t, err)
+ assert.Equal(t, []int64{}, IDs)
+
+ _, err = issues_model.MakeIDsFromAPIAssigneesToAdd(db.DefaultContext, "", []string{"none_existing_user"})
+ require.Error(t, err)
+
+ IDs, err = issues_model.MakeIDsFromAPIAssigneesToAdd(db.DefaultContext, "user1", []string{"user1"})
+ require.NoError(t, err)
+ assert.Equal(t, []int64{1}, IDs)
+
+ IDs, err = issues_model.MakeIDsFromAPIAssigneesToAdd(db.DefaultContext, "user2", []string{""})
+ require.NoError(t, err)
+ assert.Equal(t, []int64{2}, IDs)
+
+ IDs, err = issues_model.MakeIDsFromAPIAssigneesToAdd(db.DefaultContext, "", []string{"user1", "user2"})
+ require.NoError(t, err)
+ assert.Equal(t, []int64{1, 2}, IDs)
+}
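The test above exercises ToggleIssueAssignee, IsUserAssignedToIssue and MakeIDsFromAPIAssigneesToAdd, but not GetAssigneeIDsByIssue. A minimal sketch of how that read-only helper pairs with IsUserAssignedToIssue (hypothetical caller package and function name, not part of the commit):

package example // hypothetical caller package, for illustration only

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	user_model "code.gitea.io/gitea/models/user"
)

// assignedSummary returns all assignee IDs of an issue (without joining the
// `user` table) and whether one particular user is among them.
func assignedSummary(ctx context.Context, issue *issues_model.Issue, user *user_model.User) ([]int64, bool, error) {
	ids, err := issues_model.GetAssigneeIDsByIssue(ctx, issue.ID)
	if err != nil {
		return nil, false, err
	}
	assigned, err := issues_model.IsUserAssignedToIssue(ctx, issue, user)
	return ids, assigned, err
}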
diff --git a/models/issues/comment.go b/models/issues/comment.go
new file mode 100644
index 0000000..d53e5f5
--- /dev/null
+++ b/models/issues/comment.go
@@ -0,0 +1,1333 @@
+// Copyright 2018 The Gitea Authors.
+// Copyright 2016 The Gogs Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "strconv"
+ "unicode/utf8"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/references"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrCommentNotExist represents a "CommentNotExist" kind of error.
+type ErrCommentNotExist struct {
+ ID int64
+ IssueID int64
+}
+
+// IsErrCommentNotExist checks if an error is a ErrCommentNotExist.
+func IsErrCommentNotExist(err error) bool {
+ _, ok := err.(ErrCommentNotExist)
+ return ok
+}
+
+func (err ErrCommentNotExist) Error() string {
+ return fmt.Sprintf("comment does not exist [id: %d, issue_id: %d]", err.ID, err.IssueID)
+}
+
+func (err ErrCommentNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+var ErrCommentAlreadyChanged = util.NewInvalidArgumentErrorf("the comment is already changed")
+
+// CommentType defines whether a comment is just a simple comment, an action (like close) or a reference.
+type CommentType int
+
+// CommentTypeUndefined is used to search for comments of any type
+const CommentTypeUndefined CommentType = -1
+
+const (
+ CommentTypeComment CommentType = iota // 0 Plain comment, can be associated with a commit (CommitID > 0) and a line (LineNum > 0)
+
+ CommentTypeReopen // 1
+ CommentTypeClose // 2
+
+ CommentTypeIssueRef // 3 References.
+ CommentTypeCommitRef // 4 Reference from a commit (not part of a pull request)
+ CommentTypeCommentRef // 5 Reference from a comment
+ CommentTypePullRef // 6 Reference from a pull request
+
+ CommentTypeLabel // 7 Labels changed
+ CommentTypeMilestone // 8 Milestone changed
+ CommentTypeAssignees // 9 Assignees changed
+ CommentTypeChangeTitle // 10 Change Title
+ CommentTypeDeleteBranch // 11 Delete Branch
+
+ CommentTypeStartTracking // 12 Start a stopwatch for time tracking
+ CommentTypeStopTracking // 13 Stop a stopwatch for time tracking
+ CommentTypeAddTimeManual // 14 Add time manually for time tracking
+ CommentTypeCancelTracking // 15 Cancel a stopwatch for time tracking
+ CommentTypeAddedDeadline // 16 Added a due date
+ CommentTypeModifiedDeadline // 17 Modified the due date
+ CommentTypeRemovedDeadline // 18 Removed a due date
+
+ CommentTypeAddDependency // 19 Dependency added
+ CommentTypeRemoveDependency // 20 Dependency removed
+
+ CommentTypeCode // 21 Comment a line of code
+ CommentTypeReview // 22 Reviews a pull request by giving general feedback
+
+ CommentTypeLock // 23 Lock an issue, giving only collaborators access
+ CommentTypeUnlock // 24 Unlocks a previously locked issue
+
+ CommentTypeChangeTargetBranch // 25 Change pull request's target branch
+
+ CommentTypeDeleteTimeManual // 26 Delete manually added time for time tracking
+
+ CommentTypeReviewRequest // 27 add or remove a review request
+ CommentTypeMergePull // 28 merge pull request
+ CommentTypePullRequestPush // 29 push to PR head branch
+
+ CommentTypeProject // 30 Project changed
+ CommentTypeProjectColumn // 31 Project column changed
+
+ CommentTypeDismissReview // 32 Dismiss Review
+
+ CommentTypeChangeIssueRef // 33 Change issue ref
+
+ CommentTypePRScheduledToAutoMerge // 34 pr was scheduled to auto merge when checks succeed
+ CommentTypePRUnScheduledToAutoMerge // 35 pr was unscheduled to auto merge when checks succeed
+
+ CommentTypePin // 36 pin Issue
+ CommentTypeUnpin // 37 unpin Issue
+)
+
+var commentStrings = []string{
+ "comment",
+ "reopen",
+ "close",
+ "issue_ref",
+ "commit_ref",
+ "comment_ref",
+ "pull_ref",
+ "label",
+ "milestone",
+ "assignees",
+ "change_title",
+ "delete_branch",
+ "start_tracking",
+ "stop_tracking",
+ "add_time_manual",
+ "cancel_tracking",
+ "added_deadline",
+ "modified_deadline",
+ "removed_deadline",
+ "add_dependency",
+ "remove_dependency",
+ "code",
+ "review",
+ "lock",
+ "unlock",
+ "change_target_branch",
+ "delete_time_manual",
+ "review_request",
+ "merge_pull",
+ "pull_push",
+ "project",
+ "project_board", // FIXME: the name should be project_column
+ "dismiss_review",
+ "change_issue_ref",
+ "pull_scheduled_merge",
+ "pull_cancel_scheduled_merge",
+ "pin",
+ "unpin",
+}
+
+func (t CommentType) String() string {
+ return commentStrings[t]
+}
+
+func AsCommentType(typeName string) CommentType {
+ for index, name := range commentStrings {
+ if typeName == name {
+ return CommentType(index)
+ }
+ }
+ return CommentTypeUndefined
+}
+
+func (t CommentType) HasContentSupport() bool {
+ switch t {
+ case CommentTypeComment, CommentTypeCode, CommentTypeReview, CommentTypeDismissReview:
+ return true
+ }
+ return false
+}
+
+func (t CommentType) HasAttachmentSupport() bool {
+ switch t {
+ case CommentTypeComment, CommentTypeCode, CommentTypeReview:
+ return true
+ }
+ return false
+}
+
+func (t CommentType) HasMailReplySupport() bool {
+ switch t {
+ case CommentTypeComment, CommentTypeCode, CommentTypeReview, CommentTypeDismissReview, CommentTypeReopen, CommentTypeClose, CommentTypeMergePull, CommentTypeAssignees:
+ return true
+ }
+ return false
+}
+
+// RoleInRepo presents the user's participation in the repo
+type RoleInRepo string
+
+// RoleDescriptor defines comment "role" tags
+type RoleDescriptor struct {
+ IsPoster bool
+ RoleInRepo RoleInRepo
+}
+
+// Enumerate all the role tags.
+const (
+ RoleRepoOwner RoleInRepo = "owner"
+ RoleRepoMember RoleInRepo = "member"
+ RoleRepoCollaborator RoleInRepo = "collaborator"
+ RoleRepoFirstTimeContributor RoleInRepo = "first_time_contributor"
+ RoleRepoContributor RoleInRepo = "contributor"
+)
+
+// LocaleString returns the locale string name of the role
+func (r RoleInRepo) LocaleString(lang translation.Locale) string {
+ return lang.TrString("repo.issues.role." + string(r))
+}
+
+// LocaleHelper returns the locale tooltip of the role
+func (r RoleInRepo) LocaleHelper(lang translation.Locale) string {
+ return lang.TrString("repo.issues.role." + string(r) + "_helper")
+}
+
+// Comment represents a comment in commit and issue page.
+type Comment struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type CommentType `xorm:"INDEX"`
+ PosterID int64 `xorm:"INDEX"`
+ Poster *user_model.User `xorm:"-"`
+ OriginalAuthor string
+ OriginalAuthorID int64
+ IssueID int64 `xorm:"INDEX"`
+ Issue *Issue `xorm:"-"`
+ LabelID int64
+ Label *Label `xorm:"-"`
+ AddedLabels []*Label `xorm:"-"`
+ RemovedLabels []*Label `xorm:"-"`
+ OldProjectID int64
+ ProjectID int64
+ OldProject *project_model.Project `xorm:"-"`
+ Project *project_model.Project `xorm:"-"`
+ OldMilestoneID int64
+ MilestoneID int64
+ OldMilestone *Milestone `xorm:"-"`
+ Milestone *Milestone `xorm:"-"`
+ TimeID int64
+ Time *TrackedTime `xorm:"-"`
+ AssigneeID int64
+ RemovedAssignee bool
+ Assignee *user_model.User `xorm:"-"`
+ AssigneeTeamID int64 `xorm:"NOT NULL DEFAULT 0"`
+ AssigneeTeam *organization.Team `xorm:"-"`
+ ResolveDoerID int64
+ ResolveDoer *user_model.User `xorm:"-"`
+ OldTitle string
+ NewTitle string
+ OldRef string
+ NewRef string
+ DependentIssueID int64 `xorm:"index"` // This is used by issue_service.deleteIssue
+ DependentIssue *Issue `xorm:"-"`
+
+ CommitID int64
+ Line int64 // - previous line / + proposed line
+ TreePath string
+ Content string `xorm:"LONGTEXT"`
+ ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
+ RenderedContent template.HTML `xorm:"-"`
+
+ // Patch represents the 4 lines of code cemented by this comment
+ Patch string `xorm:"-"`
+ PatchQuoted string `xorm:"LONGTEXT patch"`
+
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+
+ // Reference issue in commit message
+ CommitSHA string `xorm:"VARCHAR(64)"`
+
+ Attachments []*repo_model.Attachment `xorm:"-"`
+ Reactions ReactionList `xorm:"-"`
+
+ // For view issue page.
+ ShowRole RoleDescriptor `xorm:"-"`
+
+ Review *Review `xorm:"-"`
+ ReviewID int64 `xorm:"index"`
+ Invalidated bool
+
+ // Reference an issue or pull from another comment, issue or PR
+ // All information is about the origin of the reference
+ RefRepoID int64 `xorm:"index"` // Repo where the referencing issue, comment or PR lives
+ RefIssueID int64 `xorm:"index"`
+ RefCommentID int64 `xorm:"index"` // 0 if origin is Issue title or content (or PR's)
+ RefAction references.XRefAction `xorm:"SMALLINT"` // What happens if RefIssueID resolves
+ RefIsPull bool
+
+ RefRepo *repo_model.Repository `xorm:"-"`
+ RefIssue *Issue `xorm:"-"`
+ RefComment *Comment `xorm:"-"`
+
+ Commits []*git_model.SignCommitWithStatuses `xorm:"-"`
+ OldCommit string `xorm:"-"`
+ NewCommit string `xorm:"-"`
+ CommitsNum int64 `xorm:"-"`
+ IsForcePush bool `xorm:"-"`
+}
+
+func init() {
+ db.RegisterModel(new(Comment))
+}
+
+// PushActionContent is the content of a pull request push comment
+type PushActionContent struct {
+ IsForcePush bool `json:"is_force_push"`
+ CommitIDs []string `json:"commit_ids"`
+}
+
+// LoadIssue loads the issue reference for the comment
+func (c *Comment) LoadIssue(ctx context.Context) (err error) {
+ if c.Issue != nil {
+ return nil
+ }
+ c.Issue, err = GetIssueByID(ctx, c.IssueID)
+ return err
+}
+
+// BeforeInsert will be invoked by XORM before inserting a record
+func (c *Comment) BeforeInsert() {
+ c.PatchQuoted = c.Patch
+ if !utf8.ValidString(c.Patch) {
+ c.PatchQuoted = strconv.Quote(c.Patch)
+ }
+}
+
+// BeforeUpdate will be invoked by XORM before updating a record
+func (c *Comment) BeforeUpdate() {
+ c.PatchQuoted = c.Patch
+ if !utf8.ValidString(c.Patch) {
+ c.PatchQuoted = strconv.Quote(c.Patch)
+ }
+}
+
+// AfterLoad is invoked from XORM after setting the values of all fields of this object.
+func (c *Comment) AfterLoad() {
+ c.Patch = c.PatchQuoted
+ if len(c.PatchQuoted) > 0 && c.PatchQuoted[0] == '"' {
+ unquoted, err := strconv.Unquote(c.PatchQuoted)
+ if err == nil {
+ c.Patch = unquoted
+ }
+ }
+}
+
+// LoadPoster loads comment poster
+func (c *Comment) LoadPoster(ctx context.Context) (err error) {
+ if c.Poster != nil {
+ return nil
+ }
+
+ c.Poster, err = user_model.GetPossibleUserByID(ctx, c.PosterID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ c.PosterID = user_model.GhostUserID
+ c.Poster = user_model.NewGhostUser()
+ } else {
+ log.Error("getUserByID[%d]: %v", c.ID, err)
+ }
+ }
+ return err
+}
+
+// AfterDelete is invoked from XORM after the object is deleted.
+func (c *Comment) AfterDelete(ctx context.Context) {
+ if c.ID <= 0 {
+ return
+ }
+
+ _, err := repo_model.DeleteAttachmentsByComment(ctx, c.ID, true)
+ if err != nil {
+ log.Info("Could not delete files for comment %d on issue #%d: %s", c.ID, c.IssueID, err)
+ }
+}
+
+// HTMLURL formats a URL-string to the issue-comment
+func (c *Comment) HTMLURL(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+ return c.Issue.HTMLURL() + c.hashLink(ctx)
+}
+
+// Link formats a relative URL-string to the issue-comment
+func (c *Comment) Link(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+ return c.Issue.Link() + c.hashLink(ctx)
+}
+
+func (c *Comment) hashLink(ctx context.Context) string {
+ if c.Type == CommentTypeCode {
+ if c.ReviewID == 0 {
+ return "/files#" + c.HashTag()
+ }
+ if c.Review == nil {
+ if err := c.LoadReview(ctx); err != nil {
+ log.Warn("LoadReview(%d): %v", c.ReviewID, err)
+ return "/files#" + c.HashTag()
+ }
+ }
+ if c.Review.Type <= ReviewTypePending {
+ return "/files#" + c.HashTag()
+ }
+ }
+ return "#" + c.HashTag()
+}
+
+// APIURL formats an API-string to the issue-comment
+func (c *Comment) APIURL(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+
+ return fmt.Sprintf("%s/issues/comments/%d", c.Issue.Repo.APIURL(), c.ID)
+}
+
+// IssueURL formats a URL-string to the issue
+func (c *Comment) IssueURL(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+
+ if c.Issue.IsPull {
+ return ""
+ }
+
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+ return c.Issue.HTMLURL()
+}
+
+// PRURL formats a URL-string to the pull-request
+func (c *Comment) PRURL(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+
+ if !c.Issue.IsPull {
+ return ""
+ }
+ return c.Issue.HTMLURL()
+}
+
+// CommentHashTag returns unique hash tag for comment id.
+func CommentHashTag(id int64) string {
+ return fmt.Sprintf("issuecomment-%d", id)
+}
+
+// HashTag returns unique hash tag for comment.
+func (c *Comment) HashTag() string {
+ return CommentHashTag(c.ID)
+}
+
+// EventTag returns unique event hash tag for comment.
+func (c *Comment) EventTag() string {
+ return fmt.Sprintf("event-%d", c.ID)
+}
+
+// LoadLabel if comment.Type is CommentTypeLabel, then load Label
+func (c *Comment) LoadLabel(ctx context.Context) error {
+ var label Label
+ has, err := db.GetEngine(ctx).ID(c.LabelID).Get(&label)
+ if err != nil {
+ return err
+ } else if has {
+ c.Label = &label
+ } else {
+ // The label may have been deleted; ignore it, but do not clear the stored label_id
+ log.Warn("Comment %d cannot load label %d", c.ID, c.LabelID)
+ }
+
+ return nil
+}
+
+// LoadProject if comment.Type is CommentTypeProject, then load project.
+func (c *Comment) LoadProject(ctx context.Context) error {
+ if c.OldProjectID > 0 {
+ var oldProject project_model.Project
+ has, err := db.GetEngine(ctx).ID(c.OldProjectID).Get(&oldProject)
+ if err != nil {
+ return err
+ } else if has {
+ c.OldProject = &oldProject
+ }
+ }
+
+ if c.ProjectID > 0 {
+ var project project_model.Project
+ has, err := db.GetEngine(ctx).ID(c.ProjectID).Get(&project)
+ if err != nil {
+ return err
+ } else if has {
+ c.Project = &project
+ }
+ }
+
+ return nil
+}
+
+// LoadMilestone if comment.Type is CommentTypeMilestone, then load milestone
+func (c *Comment) LoadMilestone(ctx context.Context) error {
+ if c.OldMilestoneID > 0 {
+ var oldMilestone Milestone
+ has, err := db.GetEngine(ctx).ID(c.OldMilestoneID).Get(&oldMilestone)
+ if err != nil {
+ return err
+ } else if has {
+ c.OldMilestone = &oldMilestone
+ }
+ }
+
+ if c.MilestoneID > 0 {
+ var milestone Milestone
+ has, err := db.GetEngine(ctx).ID(c.MilestoneID).Get(&milestone)
+ if err != nil {
+ return err
+ } else if has {
+ c.Milestone = &milestone
+ }
+ }
+ return nil
+}
+
+// LoadAttachments loads attachments (it never returns an error; any error from `GetAttachmentsByCommentID` is logged and ignored)
+func (c *Comment) LoadAttachments(ctx context.Context) error {
+ if len(c.Attachments) > 0 {
+ return nil
+ }
+
+ var err error
+ c.Attachments, err = repo_model.GetAttachmentsByCommentID(ctx, c.ID)
+ if err != nil {
+ log.Error("getAttachmentsByCommentID[%d]: %v", c.ID, err)
+ }
+ return nil
+}
+
+// UpdateAttachments update attachments by UUIDs for the comment
+func (c *Comment) UpdateAttachments(ctx context.Context, uuids []string) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, uuids)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
+ }
+ for i := 0; i < len(attachments); i++ {
+ attachments[i].IssueID = c.IssueID
+ attachments[i].CommentID = c.ID
+ if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
+ return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
+ }
+ }
+ return committer.Commit()
+}
+
+// LoadAssigneeUserAndTeam if comment.Type is CommentTypeAssignees, then load assignees
+func (c *Comment) LoadAssigneeUserAndTeam(ctx context.Context) error {
+ var err error
+
+ if c.AssigneeID > 0 && c.Assignee == nil {
+ c.Assignee, err = user_model.GetUserByID(ctx, c.AssigneeID)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ return err
+ }
+ c.Assignee = user_model.NewGhostUser()
+ }
+ } else if c.AssigneeTeamID > 0 && c.AssigneeTeam == nil {
+ if err = c.LoadIssue(ctx); err != nil {
+ return err
+ }
+
+ if err = c.Issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if err = c.Issue.Repo.LoadOwner(ctx); err != nil {
+ return err
+ }
+
+ if c.Issue.Repo.Owner.IsOrganization() {
+ c.AssigneeTeam, err = organization.GetTeamByID(ctx, c.AssigneeTeamID)
+ if err != nil && !organization.IsErrTeamNotExist(err) {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// LoadResolveDoer if comment.Type is CommentTypeCode and ResolveDoerID not zero, then load resolveDoer
+func (c *Comment) LoadResolveDoer(ctx context.Context) (err error) {
+ if c.ResolveDoerID == 0 || c.Type != CommentTypeCode {
+ return nil
+ }
+ c.ResolveDoer, err = user_model.GetUserByID(ctx, c.ResolveDoerID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ c.ResolveDoer = user_model.NewGhostUser()
+ err = nil
+ }
+ }
+ return err
+}
+
+// IsResolved checks if a code comment is resolved
+func (c *Comment) IsResolved() bool {
+ return c.ResolveDoerID != 0 && c.Type == CommentTypeCode
+}
+
+// LoadDepIssueDetails loads Dependent Issue Details
+func (c *Comment) LoadDepIssueDetails(ctx context.Context) (err error) {
+ if c.DependentIssueID <= 0 || c.DependentIssue != nil {
+ return nil
+ }
+ c.DependentIssue, err = GetIssueByID(ctx, c.DependentIssueID)
+ return err
+}
+
+// LoadTime loads the associated time for a CommentTypeAddTimeManual
+func (c *Comment) LoadTime(ctx context.Context) error {
+ if c.Time != nil || c.TimeID == 0 {
+ return nil
+ }
+ var err error
+ c.Time, err = GetTrackedTimeByID(ctx, c.TimeID)
+ return err
+}
+
+// LoadReactions loads comment reactions
+func (c *Comment) LoadReactions(ctx context.Context, repo *repo_model.Repository) (err error) {
+ if c.Reactions != nil {
+ return nil
+ }
+ c.Reactions, _, err = FindReactions(ctx, FindReactionsOptions{
+ IssueID: c.IssueID,
+ CommentID: c.ID,
+ })
+ if err != nil {
+ return err
+ }
+ // Load reaction user data
+ if _, err := c.Reactions.LoadUsers(ctx, repo); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (c *Comment) loadReview(ctx context.Context) (err error) {
+ if c.ReviewID == 0 {
+ return nil
+ }
+ if c.Review == nil {
+ if c.Review, err = GetReviewByID(ctx, c.ReviewID); err != nil {
+ // a review request which has been replaced by an actual review no longer exists in the database, so ignore it.
+ if c.Type == CommentTypeReviewRequest {
+ return nil
+ }
+ return err
+ }
+ }
+ c.Review.Issue = c.Issue
+ return nil
+}
+
+// LoadReview loads the associated review
+func (c *Comment) LoadReview(ctx context.Context) error {
+ return c.loadReview(ctx)
+}
+
+// DiffSide returns "previous" if Comment.Line is a LOC of the previous changes and "proposed" if it is a LOC of the proposed changes.
+func (c *Comment) DiffSide() string {
+ if c.Line < 0 {
+ return "previous"
+ }
+ return "proposed"
+}
+
+// UnsignedLine returns the LOC of the code comment without + or -
+func (c *Comment) UnsignedLine() uint64 {
+ if c.Line < 0 {
+ return uint64(c.Line * -1)
+ }
+ return uint64(c.Line)
+}
+
+// CodeCommentLink returns the url to a comment in code
+func (c *Comment) CodeCommentLink(ctx context.Context) string {
+ err := c.LoadIssue(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadIssue(%d): %v", c.IssueID, err)
+ return ""
+ }
+ err = c.Issue.LoadRepo(ctx)
+ if err != nil { // Silently dropping errors :unamused:
+ log.Error("loadRepo(%d): %v", c.Issue.RepoID, err)
+ return ""
+ }
+ return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag())
+}
+
+// LoadPushCommits loads the push commits
+func (c *Comment) LoadPushCommits(ctx context.Context) (err error) {
+ if c.Content == "" || c.Commits != nil || c.Type != CommentTypePullRequestPush {
+ return nil
+ }
+
+ var data PushActionContent
+
+ err = json.Unmarshal([]byte(c.Content), &data)
+ if err != nil {
+ return err
+ }
+
+ c.IsForcePush = data.IsForcePush
+
+ if c.IsForcePush {
+ if len(data.CommitIDs) != 2 {
+ return nil
+ }
+ c.OldCommit = data.CommitIDs[0]
+ c.NewCommit = data.CommitIDs[1]
+ } else {
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo)
+ if err != nil {
+ return err
+ }
+ defer closer.Close()
+
+ c.Commits = git_model.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo)
+ c.CommitsNum = int64(len(c.Commits))
+ }
+
+ return err
+}
+
+// CreateComment creates a comment with the given options
+func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ e := db.GetEngine(ctx)
+ var LabelID int64
+ if opts.Label != nil {
+ LabelID = opts.Label.ID
+ }
+
+ comment := &Comment{
+ Type: opts.Type,
+ PosterID: opts.Doer.ID,
+ Poster: opts.Doer,
+ IssueID: opts.Issue.ID,
+ LabelID: LabelID,
+ OldMilestoneID: opts.OldMilestoneID,
+ MilestoneID: opts.MilestoneID,
+ OldProjectID: opts.OldProjectID,
+ ProjectID: opts.ProjectID,
+ TimeID: opts.TimeID,
+ RemovedAssignee: opts.RemovedAssignee,
+ AssigneeID: opts.AssigneeID,
+ AssigneeTeamID: opts.AssigneeTeamID,
+ CommitID: opts.CommitID,
+ CommitSHA: opts.CommitSHA,
+ Line: opts.LineNum,
+ Content: opts.Content,
+ OldTitle: opts.OldTitle,
+ NewTitle: opts.NewTitle,
+ OldRef: opts.OldRef,
+ NewRef: opts.NewRef,
+ DependentIssueID: opts.DependentIssueID,
+ TreePath: opts.TreePath,
+ ReviewID: opts.ReviewID,
+ Patch: opts.Patch,
+ RefRepoID: opts.RefRepoID,
+ RefIssueID: opts.RefIssueID,
+ RefCommentID: opts.RefCommentID,
+ RefAction: opts.RefAction,
+ RefIsPull: opts.RefIsPull,
+ IsForcePush: opts.IsForcePush,
+ Invalidated: opts.Invalidated,
+ }
+ if opts.Issue.NoAutoTime {
+ // Preload the comment with the Issue containing the forced update
+ // date. This is needed to propagate that data in AddCrossReferences()
+ comment.Issue = opts.Issue
+ comment.CreatedUnix = opts.Issue.UpdatedUnix
+ comment.UpdatedUnix = opts.Issue.UpdatedUnix
+ e.NoAutoTime()
+ }
+ if _, err = e.Insert(comment); err != nil {
+ return nil, err
+ }
+
+ if err = opts.Repo.LoadOwner(ctx); err != nil {
+ return nil, err
+ }
+
+ if err = updateCommentInfos(ctx, opts, comment); err != nil {
+ return nil, err
+ }
+
+ if err = comment.AddCrossReferences(ctx, opts.Doer, false); err != nil {
+ return nil, err
+ }
+ if err = committer.Commit(); err != nil {
+ return nil, err
+ }
+ return comment, nil
+}
+
+func updateCommentInfos(ctx context.Context, opts *CreateCommentOptions, comment *Comment) (err error) {
+ // Check comment type.
+ switch opts.Type {
+ case CommentTypeCode:
+ if err = updateAttachments(ctx, opts, comment); err != nil {
+ return err
+ }
+ if comment.ReviewID != 0 {
+ if comment.Review == nil {
+ if err := comment.loadReview(ctx); err != nil {
+ return err
+ }
+ }
+ if comment.Review.Type <= ReviewTypePending {
+ return nil
+ }
+ }
+ fallthrough
+ case CommentTypeComment:
+ if _, err = db.Exec(ctx, "UPDATE `issue` SET num_comments=num_comments+1 WHERE id=?", opts.Issue.ID); err != nil {
+ return err
+ }
+ fallthrough
+ case CommentTypeReview:
+ if err = updateAttachments(ctx, opts, comment); err != nil {
+ return err
+ }
+ case CommentTypeReopen, CommentTypeClose:
+ if err = repo_model.UpdateRepoIssueNumbers(ctx, opts.Issue.RepoID, opts.Issue.IsPull, true); err != nil {
+ return err
+ }
+ }
+ // update the issue's updated_unix column
+ return UpdateIssueCols(ctx, opts.Issue, "updated_unix")
+}
+
+func updateAttachments(ctx context.Context, opts *CreateCommentOptions, comment *Comment) error {
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, opts.Attachments)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", opts.Attachments, err)
+ }
+ for i := range attachments {
+ attachments[i].IssueID = opts.Issue.ID
+ attachments[i].CommentID = comment.ID
+ // None of the assigned values can be 0 here, so AllCols() is not needed.
+ if _, err = db.GetEngine(ctx).ID(attachments[i].ID).Update(attachments[i]); err != nil {
+ return fmt.Errorf("update attachment [%d]: %w", attachments[i].ID, err)
+ }
+ }
+ comment.Attachments = attachments
+ return nil
+}
+
+func createDeadlineComment(ctx context.Context, doer *user_model.User, issue *Issue, newDeadlineUnix timeutil.TimeStamp) (*Comment, error) {
+ var content string
+ var commentType CommentType
+
+ // newDeadline = 0 means deleting
+ if newDeadlineUnix == 0 {
+ commentType = CommentTypeRemovedDeadline
+ content = issue.DeadlineUnix.FormatDate()
+ } else if issue.DeadlineUnix == 0 {
+ // Check if the new date was added or modified
+ // If the actual deadline is 0 => deadline added
+ commentType = CommentTypeAddedDeadline
+ content = newDeadlineUnix.FormatDate()
+ } else { // Otherwise modified
+ commentType = CommentTypeModifiedDeadline
+ content = newDeadlineUnix.FormatDate() + "|" + issue.DeadlineUnix.FormatDate()
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ opts := &CreateCommentOptions{
+ Type: commentType,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ Content: content,
+ }
+ comment, err := CreateComment(ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+ return comment, nil
+}
+
+// createIssueDependencyComment creates a dependency comment on both the issue and the dependent issue
+func createIssueDependencyComment(ctx context.Context, doer *user_model.User, issue, dependentIssue *Issue, add bool) (err error) {
+ cType := CommentTypeAddDependency
+ if !add {
+ cType = CommentTypeRemoveDependency
+ }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // Make two comments, one in each issue
+ opts := &CreateCommentOptions{
+ Type: cType,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ DependentIssueID: dependentIssue.ID,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+
+ opts = &CreateCommentOptions{
+ Type: cType,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: dependentIssue,
+ DependentIssueID: issue.ID,
+ }
+ _, err = CreateComment(ctx, opts)
+ return err
+}
+
+// CreateCommentOptions defines options for creating a comment
+type CreateCommentOptions struct {
+ Type CommentType
+ Doer *user_model.User
+ Repo *repo_model.Repository
+ Issue *Issue
+ Label *Label
+
+ DependentIssueID int64
+ OldMilestoneID int64
+ MilestoneID int64
+ OldProjectID int64
+ ProjectID int64
+ TimeID int64
+ AssigneeID int64
+ AssigneeTeamID int64
+ RemovedAssignee bool
+ OldTitle string
+ NewTitle string
+ OldRef string
+ NewRef string
+ CommitID int64
+ CommitSHA string
+ Patch string
+ LineNum int64
+ TreePath string
+ ReviewID int64
+ Content string
+ Attachments []string // UUIDs of attachments
+ RefRepoID int64
+ RefIssueID int64
+ RefCommentID int64
+ RefAction references.XRefAction
+ RefIsPull bool
+ IsForcePush bool
+ Invalidated bool
+}
+
+// GetCommentByID returns the comment by given ID.
+func GetCommentByID(ctx context.Context, id int64) (*Comment, error) {
+ c := new(Comment)
+ has, err := db.GetEngine(ctx).ID(id).Get(c)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrCommentNotExist{id, 0}
+ }
+ return c, nil
+}
+
+// FindCommentsOptions describes the conditions to Find comments
+type FindCommentsOptions struct {
+ db.ListOptions
+ RepoID int64
+ IssueID int64
+ ReviewID int64
+ Since int64
+ Before int64
+ Line int64
+ TreePath string
+ Type CommentType
+ IssueIDs []int64
+ Invalidated optional.Option[bool]
+ IsPull optional.Option[bool]
+}
+
+// ToConds implements FindOptions interface
+func (opts FindCommentsOptions) ToConds() builder.Cond {
+ cond := builder.NewCond()
+ if opts.RepoID > 0 {
+ cond = cond.And(builder.Eq{"issue.repo_id": opts.RepoID})
+ }
+ if opts.IssueID > 0 {
+ cond = cond.And(builder.Eq{"comment.issue_id": opts.IssueID})
+ } else if len(opts.IssueIDs) > 0 {
+ cond = cond.And(builder.In("comment.issue_id", opts.IssueIDs))
+ }
+ if opts.ReviewID > 0 {
+ cond = cond.And(builder.Eq{"comment.review_id": opts.ReviewID})
+ }
+ if opts.Since > 0 {
+ cond = cond.And(builder.Gte{"comment.updated_unix": opts.Since})
+ }
+ if opts.Before > 0 {
+ cond = cond.And(builder.Lte{"comment.updated_unix": opts.Before})
+ }
+ if opts.Type != CommentTypeUndefined {
+ cond = cond.And(builder.Eq{"comment.type": opts.Type})
+ }
+ if opts.Line != 0 {
+ cond = cond.And(builder.Eq{"comment.line": opts.Line})
+ }
+ if len(opts.TreePath) > 0 {
+ cond = cond.And(builder.Eq{"comment.tree_path": opts.TreePath})
+ }
+ if opts.Invalidated.Has() {
+ cond = cond.And(builder.Eq{"comment.invalidated": opts.Invalidated.Value()})
+ }
+ if opts.IsPull.Has() {
+ cond = cond.And(builder.Eq{"issue.is_pull": opts.IsPull.Value()})
+ }
+ return cond
+}
+
+// FindComments returns all comments matching the given options
+func FindComments(ctx context.Context, opts *FindCommentsOptions) (CommentList, error) {
+ comments := make([]*Comment, 0, 10)
+ sess := db.GetEngine(ctx).Where(opts.ToConds())
+ if opts.RepoID > 0 || opts.IsPull.Has() {
+ sess.Join("INNER", "issue", "issue.id = comment.issue_id")
+ }
+
+ if opts.Page != 0 {
+ sess = db.SetSessionPagination(sess, opts)
+ }
+
+ // WARNING: If you change this order you will need to fix createCodeComment
+
+ return comments, sess.
+ Asc("comment.created_unix").
+ Asc("comment.id").
+ Find(&comments)
+}
+
+// CountComments counts all comments matching the given options, ignoring pagination
+func CountComments(ctx context.Context, opts *FindCommentsOptions) (int64, error) {
+ sess := db.GetEngine(ctx).Where(opts.ToConds())
+ if opts.RepoID > 0 {
+ sess.Join("INNER", "issue", "issue.id = comment.issue_id")
+ }
+ return sess.Count(&Comment{})
+}
+
+// UpdateCommentInvalidate updates comment invalidated column
+func UpdateCommentInvalidate(ctx context.Context, c *Comment) error {
+ _, err := db.GetEngine(ctx).ID(c.ID).Cols("invalidated").Update(c)
+ return err
+}
+
+// UpdateComment updates information of comment.
+func UpdateComment(ctx context.Context, c *Comment, contentVersion int, doer *user_model.User) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := c.LoadIssue(ctx); err != nil {
+ return err
+ }
+
+ sess := db.GetEngine(ctx).ID(c.ID).AllCols()
+ if c.Issue.NoAutoTime {
+ // update the database
+ sess = sess.NoAutoTime().SetExpr("updated_unix", c.Issue.UpdatedUnix)
+ // the UpdatedUnix value of the Comment also has to be set,
+ // so that callers see the correct value
+ // see https://codeberg.org/forgejo/forgejo/pulls/764#issuecomment-1023801
+ c.UpdatedUnix = c.Issue.UpdatedUnix
+ }
+ c.ContentVersion = contentVersion + 1
+
+ affected, err := sess.Where("content_version = ?", contentVersion).Update(c)
+ if err != nil {
+ return err
+ }
+ if affected == 0 {
+ return ErrCommentAlreadyChanged
+ }
+ if err := c.AddCrossReferences(ctx, doer, true); err != nil {
+ return err
+ }
+ if err := committer.Commit(); err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ return nil
+}
+
+// DeleteComment deletes the comment
+func DeleteComment(ctx context.Context, comment *Comment) error {
+ e := db.GetEngine(ctx)
+ if _, err := e.ID(comment.ID).NoAutoCondition().Delete(comment); err != nil {
+ return err
+ }
+
+ if _, err := db.DeleteByBean(ctx, &ContentHistory{
+ CommentID: comment.ID,
+ }); err != nil {
+ return err
+ }
+
+ if comment.Type == CommentTypeComment {
+ if _, err := e.ID(comment.IssueID).Decr("num_comments").Update(new(Issue)); err != nil {
+ return err
+ }
+ }
+ if _, err := e.Table("action").
+ Where("comment_id = ?", comment.ID).
+ Update(map[string]any{
+ "is_deleted": true,
+ }); err != nil {
+ return err
+ }
+
+ if err := comment.neuterCrossReferences(ctx); err != nil {
+ return err
+ }
+
+ return DeleteReaction(ctx, &ReactionOptions{CommentID: comment.ID})
+}
+
+// UpdateCommentsMigrationsByType updates the migration information of comments for the given git service type, original author ID and poster ID
+func UpdateCommentsMigrationsByType(ctx context.Context, tp structs.GitServiceType, originalAuthorID string, posterID int64) error {
+ _, err := db.GetEngine(ctx).Table("comment").
+ Join("INNER", "issue", "issue.id = comment.issue_id").
+ Join("INNER", "repository", "issue.repo_id = repository.id").
+ Where("repository.original_service_type = ?", tp).
+ And("comment.original_author_id = ?", originalAuthorID).
+ Update(map[string]any{
+ "poster_id": posterID,
+ "original_author": "",
+ "original_author_id": 0,
+ })
+ return err
+}
+
+// CreateAutoMergeComment is an internal function; only use it for the CommentTypePRScheduledToAutoMerge and CommentTypePRUnScheduledToAutoMerge comment types
+func CreateAutoMergeComment(ctx context.Context, typ CommentType, pr *PullRequest, doer *user_model.User) (comment *Comment, err error) {
+ if typ != CommentTypePRScheduledToAutoMerge && typ != CommentTypePRUnScheduledToAutoMerge {
+ return nil, fmt.Errorf("comment type %d cannot be used to create an auto merge comment", typ)
+ }
+ if err = pr.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+
+ if err = pr.LoadBaseRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ comment, err = CreateComment(ctx, &CreateCommentOptions{
+ Type: typ,
+ Doer: doer,
+ Repo: pr.BaseRepo,
+ Issue: pr.Issue,
+ })
+ return comment, err
+}
+
+// RemapExternalUser ExternalUserRemappable interface
+func (c *Comment) RemapExternalUser(externalName string, externalID, userID int64) error {
+ c.OriginalAuthor = externalName
+ c.OriginalAuthorID = externalID
+ c.PosterID = userID
+ return nil
+}
+
+// GetUserID ExternalUserRemappable interface
+func (c *Comment) GetUserID() int64 { return c.PosterID }
+
+// GetExternalName ExternalUserRemappable interface
+func (c *Comment) GetExternalName() string { return c.OriginalAuthor }
+
+// GetExternalID ExternalUserRemappable interface
+func (c *Comment) GetExternalID() int64 { return c.OriginalAuthorID }
+
+// CountCommentTypeLabelWithEmptyLabel counts label comments with an empty label
+func CountCommentTypeLabelWithEmptyLabel(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).Where(builder.Eq{"type": CommentTypeLabel, "label_id": 0}).Count(new(Comment))
+}
+
+// FixCommentTypeLabelWithEmptyLabel deletes label comments with an empty label
+func FixCommentTypeLabelWithEmptyLabel(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).Where(builder.Eq{"type": CommentTypeLabel, "label_id": 0}).Delete(new(Comment))
+}
+
+// CountCommentTypeLabelWithOutsideLabels counts label comments that reference labels from outside the repository or organization
+func CountCommentTypeLabelWithOutsideLabels(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).Where("comment.type = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id))", CommentTypeLabel).
+ Table("comment").
+ Join("inner", "label", "label.id = comment.label_id").
+ Join("inner", "issue", "issue.id = comment.issue_id ").
+ Join("inner", "repository", "issue.repo_id = repository.id").
+ Count()
+}
+
+// FixCommentTypeLabelWithOutsideLabels deletes label comments that reference labels from outside the repository or organization
+func FixCommentTypeLabelWithOutsideLabels(ctx context.Context) (int64, error) {
+ res, err := db.GetEngine(ctx).Exec(`DELETE FROM comment WHERE comment.id IN (
+ SELECT il_too.id FROM (
+ SELECT com.id
+ FROM comment AS com
+ INNER JOIN label ON com.label_id = label.id
+ INNER JOIN issue on issue.id = com.issue_id
+ INNER JOIN repository ON issue.repo_id = repository.id
+ WHERE
+ com.type = ? AND ((label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id))
+ ) AS il_too)`, CommentTypeLabel)
+ if err != nil {
+ return 0, err
+ }
+
+ return res.RowsAffected()
+}
+
+// HasOriginalAuthor returns whether a comment was migrated and has an original author.
+func (c *Comment) HasOriginalAuthor() bool {
+ return c.OriginalAuthor != "" && c.OriginalAuthorID != 0
+}
+
+// InsertIssueComments inserts many comments of issues.
+func InsertIssueComments(ctx context.Context, comments []*Comment) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ issueIDs := container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.IssueID, true
+ })
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ for _, comment := range comments {
+ if _, err := db.GetEngine(ctx).NoAutoTime().Insert(comment); err != nil {
+ return err
+ }
+
+ for _, reaction := range comment.Reactions {
+ reaction.IssueID = comment.IssueID
+ reaction.CommentID = comment.ID
+ }
+ if len(comment.Reactions) > 0 {
+ if err := db.Insert(ctx, comment.Reactions); err != nil {
+ return err
+ }
+ }
+ }
+
+ for _, issueID := range issueIDs {
+ if _, err := db.Exec(ctx, "UPDATE issue set num_comments = (SELECT count(*) FROM comment WHERE issue_id = ? AND `type`=?) WHERE id = ?",
+ issueID, CommentTypeComment, issueID); err != nil {
+ return err
+ }
+ }
+ return committer.Commit()
+}
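comment.go exposes the full create/find/update cycle for comments. The following is a minimal sketch of that cycle (not part of the commit; the caller package and function name are hypothetical, and repo, issue and doer are assumed to be already loaded). Note that UpdateComment uses ContentVersion for optimistic locking and returns ErrCommentAlreadyChanged when the stored version has moved on:

package example // hypothetical caller package, for illustration only

import (
	"context"

	issues_model "code.gitea.io/gitea/models/issues"
	repo_model "code.gitea.io/gitea/models/repo"
	user_model "code.gitea.io/gitea/models/user"
)

// postEditList creates a plain comment, edits it with optimistic locking,
// and then lists all plain comments of the issue, oldest first.
func postEditList(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, doer *user_model.User) (issues_model.CommentList, error) {
	comment, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
		Type:    issues_model.CommentTypeComment,
		Doer:    doer,
		Repo:    repo,
		Issue:   issue,
		Content: "first version",
	})
	if err != nil {
		return nil, err
	}

	// Edit the comment; the ContentVersion passed in must match the stored
	// one, otherwise ErrCommentAlreadyChanged is returned (someone else
	// edited the comment concurrently).
	comment.Content = "second version"
	if err := issues_model.UpdateComment(ctx, comment, comment.ContentVersion, doer); err != nil {
		return nil, err
	}

	return issues_model.FindComments(ctx, &issues_model.FindCommentsOptions{
		IssueID: issue.ID,
		Type:    issues_model.CommentTypeComment,
	})
}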
diff --git a/models/issues/comment_code.go b/models/issues/comment_code.go
new file mode 100644
index 0000000..2f6f57e
--- /dev/null
+++ b/models/issues/comment_code.go
@@ -0,0 +1,181 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+
+ "xorm.io/builder"
+)
+
+// CodeConversation contains the comments of a given review at a given line and tree path
+type CodeConversation []*Comment
+
+// CodeConversationsAtLine contains the conversations for a given line
+type CodeConversationsAtLine map[int64][]CodeConversation
+
+// CodeConversationsAtLineAndTreePath contains the conversations for a given TreePath and line
+type CodeConversationsAtLineAndTreePath map[string]CodeConversationsAtLine
+
+func newCodeConversationsAtLineAndTreePath(comments []*Comment) CodeConversationsAtLineAndTreePath {
+ tree := make(CodeConversationsAtLineAndTreePath)
+ for _, comment := range comments {
+ tree.insertComment(comment)
+ }
+ return tree
+}
+
+func (tree CodeConversationsAtLineAndTreePath) insertComment(comment *Comment) {
+ // attempt to append comment to existing conversations (i.e. list of comments belonging to the same review)
+ for i, conversation := range tree[comment.TreePath][comment.Line] {
+ if conversation[0].ReviewID == comment.ReviewID {
+ tree[comment.TreePath][comment.Line][i] = append(conversation, comment)
+ return
+ }
+ }
+
+ // no previous conversation was found at this line, create it
+ if tree[comment.TreePath] == nil {
+ tree[comment.TreePath] = make(map[int64][]CodeConversation)
+ }
+
+ tree[comment.TreePath][comment.Line] = append(tree[comment.TreePath][comment.Line], CodeConversation{comment})
+}
+
+// FetchCodeConversations will return a 2d-map: ["Path"]["Line"] = List of CodeConversation (one per review) for this line
+func FetchCodeConversations(ctx context.Context, issue *Issue, doer *user_model.User, showOutdatedComments bool) (CodeConversationsAtLineAndTreePath, error) {
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: issue.ID,
+ }
+ comments, err := findCodeComments(ctx, opts, issue, doer, nil, showOutdatedComments)
+ if err != nil {
+ return nil, err
+ }
+
+ return newCodeConversationsAtLineAndTreePath(comments), nil
+}
+
+// CodeComments represents comments on code by using this structure: FILENAME -> LINE (+ == proposed; - == previous) -> COMMENTS
+type CodeComments map[string]map[int64][]*Comment
+
+func fetchCodeCommentsByReview(ctx context.Context, issue *Issue, doer *user_model.User, review *Review, showOutdatedComments bool) (CodeComments, error) {
+ pathToLineToComment := make(CodeComments)
+ if review == nil {
+ review = &Review{ID: 0}
+ }
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: issue.ID,
+ ReviewID: review.ID,
+ }
+
+ comments, err := findCodeComments(ctx, opts, issue, doer, review, showOutdatedComments)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, comment := range comments {
+ if pathToLineToComment[comment.TreePath] == nil {
+ pathToLineToComment[comment.TreePath] = make(map[int64][]*Comment)
+ }
+ pathToLineToComment[comment.TreePath][comment.Line] = append(pathToLineToComment[comment.TreePath][comment.Line], comment)
+ }
+ return pathToLineToComment, nil
+}
+
+func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issue, doer *user_model.User, review *Review, showOutdatedComments bool) (CommentList, error) {
+ var comments CommentList
+ if review == nil {
+ review = &Review{ID: 0}
+ }
+ conds := opts.ToConds()
+
+ if !showOutdatedComments && review.ID == 0 {
+ conds = conds.And(builder.Eq{"invalidated": false})
+ }
+
+ e := db.GetEngine(ctx)
+ if err := e.Where(conds).
+ Asc("comment.created_unix").
+ Asc("comment.id").
+ Find(&comments); err != nil {
+ return nil, err
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ if err := comments.LoadPosters(ctx); err != nil {
+ return nil, err
+ }
+
+ if err := comments.LoadAttachments(ctx); err != nil {
+ return nil, err
+ }
+
+ // Find all reviews by ReviewID
+ reviews := make(map[int64]*Review)
+ ids := make([]int64, 0, len(comments))
+ for _, comment := range comments {
+ if comment.ReviewID != 0 {
+ ids = append(ids, comment.ReviewID)
+ }
+ }
+ if err := e.In("id", ids).Find(&reviews); err != nil {
+ return nil, err
+ }
+
+ n := 0
+ for _, comment := range comments {
+ if re, ok := reviews[comment.ReviewID]; ok && re != nil {
+ // If the review is pending, only its author can see the comments (unless a specific review was requested)
+ if review.ID == 0 && re.Type == ReviewTypePending &&
+ (doer == nil || doer.ID != re.ReviewerID) {
+ continue
+ }
+ comment.Review = re
+ }
+ comments[n] = comment
+ n++
+
+ if err := comment.LoadResolveDoer(ctx); err != nil {
+ return nil, err
+ }
+
+ if err := comment.LoadReactions(ctx, issue.Repo); err != nil {
+ return nil, err
+ }
+
+ var err error
+ if comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ Base: issue.Repo.Link(),
+ },
+ Metas: issue.Repo.ComposeMetas(ctx),
+ }, comment.Content); err != nil {
+ return nil, err
+ }
+ }
+ return comments[:n], nil
+}
+
+// FetchCodeConversation fetches the code conversation of a given comment (same review, treePath and line number)
+func FetchCodeConversation(ctx context.Context, comment *Comment, doer *user_model.User) (CommentList, error) {
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: comment.IssueID,
+ ReviewID: comment.ReviewID,
+ TreePath: comment.TreePath,
+ Line: comment.Line,
+ }
+ return findCodeComments(ctx, opts, comment.Issue, doer, nil, true)
+}
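comment_code.go groups code comments into a TreePath -> Line -> conversations structure. A minimal sketch of walking the result of FetchCodeConversations (not part of the commit; the caller package and function name are hypothetical, and negative line numbers refer to the previous side of the diff, matching DiffSide in comment.go):

package example // hypothetical caller package, for illustration only

import (
	"context"
	"fmt"

	issues_model "code.gitea.io/gitea/models/issues"
	user_model "code.gitea.io/gitea/models/user"
)

// printCodeConversations walks TreePath -> Line -> []CodeConversation and
// prints one summary line per conversation (one conversation per review).
func printCodeConversations(ctx context.Context, issue *issues_model.Issue, doer *user_model.User) error {
	convs, err := issues_model.FetchCodeConversations(ctx, issue, doer, false /* showOutdatedComments */)
	if err != nil {
		return err
	}
	for treePath, byLine := range convs {
		for line, conversations := range byLine {
			side := "proposed"
			if line < 0 {
				side = "previous"
			}
			for _, conversation := range conversations {
				fmt.Printf("%s line %d (%s side): %d comment(s), review %d\n",
					treePath, line, side, len(conversation), conversation[0].ReviewID)
			}
		}
	}
	return nil
}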
diff --git a/models/issues/comment_list.go b/models/issues/comment_list.go
new file mode 100644
index 0000000..7a133d1
--- /dev/null
+++ b/models/issues/comment_list.go
@@ -0,0 +1,488 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// CommentList defines a list of comments
+type CommentList []*Comment
+
+// LoadPosters loads posters
+func (comments CommentList) LoadPosters(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ posterIDs := container.FilterSlice(comments, func(c *Comment) (int64, bool) {
+ return c.PosterID, c.Poster == nil && user_model.IsValidUserID(c.PosterID)
+ })
+
+ posterMaps, err := getPostersByIDs(ctx, posterIDs)
+ if err != nil {
+ return err
+ }
+
+ for _, comment := range comments {
+ if comment.Poster == nil {
+ comment.PosterID, comment.Poster = user_model.GetUserFromMap(comment.PosterID, posterMaps)
+ }
+ }
+ return nil
+}
+
+func (comments CommentList) getLabelIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.LabelID, comment.LabelID > 0
+ })
+}
+
+func (comments CommentList) loadLabels(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ labelIDs := comments.getLabelIDs()
+ commentLabels := make(map[int64]*Label, len(labelIDs))
+ left := len(labelIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("id", labelIDs[:limit]).
+ Rows(new(Label))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var label Label
+ err = rows.Scan(&label)
+ if err != nil {
+ _ = rows.Close()
+ return err
+ }
+ commentLabels[label.ID] = &label
+ }
+ _ = rows.Close()
+ left -= limit
+ labelIDs = labelIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ comment.Label = commentLabels[comment.LabelID]
+ }
+ return nil
+}
+
+func (comments CommentList) getMilestoneIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.MilestoneID, comment.MilestoneID > 0
+ })
+}
+
+func (comments CommentList) loadMilestones(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ milestoneIDs := comments.getMilestoneIDs()
+ if len(milestoneIDs) == 0 {
+ return nil
+ }
+
+ milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+ left := len(milestoneIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ err := db.GetEngine(ctx).
+ In("id", milestoneIDs[:limit]).
+ Find(&milestoneMaps)
+ if err != nil {
+ return err
+ }
+ left -= limit
+ milestoneIDs = milestoneIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ comment.Milestone = milestoneMaps[comment.MilestoneID]
+ }
+ return nil
+}
+
+func (comments CommentList) getOldMilestoneIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.OldMilestoneID, comment.OldMilestoneID > 0
+ })
+}
+
+func (comments CommentList) loadOldMilestones(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ milestoneIDs := comments.getOldMilestoneIDs()
+ if len(milestoneIDs) == 0 {
+ return nil
+ }
+
+ milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+ left := len(milestoneIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ err := db.GetEngine(ctx).
+ In("id", milestoneIDs[:limit]).
+ Find(&milestoneMaps)
+ if err != nil {
+ return err
+ }
+ left -= limit
+ milestoneIDs = milestoneIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ comment.OldMilestone = milestoneMaps[comment.OldMilestoneID]
+ }
+ return nil
+}
+
+func (comments CommentList) getAssigneeIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.AssigneeID, user_model.IsValidUserID(comment.AssigneeID)
+ })
+}
+
+func (comments CommentList) loadAssignees(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ assigneeIDs := comments.getAssigneeIDs()
+ assignees := make(map[int64]*user_model.User, len(assigneeIDs))
+ left := len(assigneeIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("id", assigneeIDs[:limit]).
+ Rows(new(user_model.User))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var user user_model.User
+ err = rows.Scan(&user)
+ if err != nil {
+ rows.Close()
+ return err
+ }
+
+ assignees[user.ID] = &user
+ }
+ _ = rows.Close()
+
+ left -= limit
+ assigneeIDs = assigneeIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ comment.AssigneeID, comment.Assignee = user_model.GetUserFromMap(comment.AssigneeID, assignees)
+ }
+ return nil
+}
+
+// getIssueIDs returns the IDs of all issues in this comment list whose Issue has not been loaded yet
+func (comments CommentList) getIssueIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.IssueID, comment.Issue == nil
+ })
+}
+
+// Issues returns all the issues of comments
+func (comments CommentList) Issues() IssueList {
+ issues := make(map[int64]*Issue, len(comments))
+ for _, comment := range comments {
+ if comment.Issue != nil {
+ if _, ok := issues[comment.Issue.ID]; !ok {
+ issues[comment.Issue.ID] = comment.Issue
+ }
+ }
+ }
+
+ issueList := make([]*Issue, 0, len(issues))
+ for _, issue := range issues {
+ issueList = append(issueList, issue)
+ }
+ return issueList
+}
+
+// LoadIssues loads issues of comments
+func (comments CommentList) LoadIssues(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ issueIDs := comments.getIssueIDs()
+ issues := make(map[int64]*Issue, len(issueIDs))
+ left := len(issueIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("id", issueIDs[:limit]).
+ Rows(new(Issue))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var issue Issue
+ err = rows.Scan(&issue)
+ if err != nil {
+ rows.Close()
+ return err
+ }
+
+ issues[issue.ID] = &issue
+ }
+ _ = rows.Close()
+
+ left -= limit
+ issueIDs = issueIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ if comment.Issue == nil {
+ comment.Issue = issues[comment.IssueID]
+ }
+ }
+ return nil
+}
+
+func (comments CommentList) getDependentIssueIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ if comment.DependentIssue != nil {
+ return 0, false
+ }
+ return comment.DependentIssueID, comment.DependentIssueID > 0
+ })
+}
+
+func (comments CommentList) loadDependentIssues(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ e := db.GetEngine(ctx)
+ issueIDs := comments.getDependentIssueIDs()
+ issues := make(map[int64]*Issue, len(issueIDs))
+ left := len(issueIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := e.
+ In("id", issueIDs[:limit]).
+ Rows(new(Issue))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var issue Issue
+ err = rows.Scan(&issue)
+ if err != nil {
+ _ = rows.Close()
+ return err
+ }
+
+ issues[issue.ID] = &issue
+ }
+ _ = rows.Close()
+
+ left -= limit
+ issueIDs = issueIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ if comment.DependentIssue == nil {
+ comment.DependentIssue = issues[comment.DependentIssueID]
+ if comment.DependentIssue != nil {
+ if err := comment.DependentIssue.LoadRepo(ctx); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
+
+// getAttachmentCommentIDs returns only the IDs of comments whose type can have attachments
+func (comments CommentList) getAttachmentCommentIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.ID, comment.Type.HasAttachmentSupport()
+ })
+}
+
+// LoadAttachmentsByIssue loads the attachments of all comments of a single issue with one query (it assumes every comment in the list belongs to the same issue)
+func (comments CommentList) LoadAttachmentsByIssue(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ attachments := make([]*repo_model.Attachment, 0, len(comments)/2)
+ if err := db.GetEngine(ctx).Where("issue_id=? AND comment_id>0", comments[0].IssueID).Find(&attachments); err != nil {
+ return err
+ }
+
+ commentAttachmentsMap := make(map[int64][]*repo_model.Attachment, len(comments))
+ for _, attach := range attachments {
+ commentAttachmentsMap[attach.CommentID] = append(commentAttachmentsMap[attach.CommentID], attach)
+ }
+
+ for _, comment := range comments {
+ comment.Attachments = commentAttachmentsMap[comment.ID]
+ }
+ return nil
+}
+
+// LoadAttachments loads attachments
+func (comments CommentList) LoadAttachments(ctx context.Context) (err error) {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ attachments := make(map[int64][]*repo_model.Attachment, len(comments))
+ commentsIDs := comments.getAttachmentCommentIDs()
+ left := len(commentsIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("comment_id", commentsIDs[:limit]).
+ Rows(new(repo_model.Attachment))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var attachment repo_model.Attachment
+ err = rows.Scan(&attachment)
+ if err != nil {
+ _ = rows.Close()
+ return err
+ }
+ attachments[attachment.CommentID] = append(attachments[attachment.CommentID], &attachment)
+ }
+
+ _ = rows.Close()
+ left -= limit
+ commentsIDs = commentsIDs[limit:]
+ }
+
+ for _, comment := range comments {
+ comment.Attachments = attachments[comment.ID]
+ }
+ return nil
+}
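+
+// Note (editor's addition): the label, milestone, assignee, issue,
+// dependent-issue and attachment loaders in this file deliberately query in
+// chunks of db.DefaultMaxInSize IDs so the generated `IN (...)` clause stays
+// within the database's parameter limits.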
+
+func (comments CommentList) getReviewIDs() []int64 {
+ return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
+ return comment.ReviewID, comment.ReviewID > 0
+ })
+}
+
+func (comments CommentList) loadReviews(ctx context.Context) error {
+ if len(comments) == 0 {
+ return nil
+ }
+
+ reviewIDs := comments.getReviewIDs()
+ reviews := make(map[int64]*Review, len(reviewIDs))
+ if err := db.GetEngine(ctx).In("id", reviewIDs).Find(&reviews); err != nil {
+ return err
+ }
+
+ for _, comment := range comments {
+ comment.Review = reviews[comment.ReviewID]
+ if comment.Review == nil {
+ // A review request that has been replaced by actual reviews no longer exists in the database, so don't log an error for it.
+ if comment.ReviewID > 0 && comment.Type != CommentTypeReviewRequest {
+ log.Error("comment with review id [%d] but has no review record", comment.ReviewID)
+ }
+ continue
+ }
+
+ // If the comment dismisses a review, we need to load the reviewer to show whose review has been dismissed.
+ // Otherwise, the reviewer is the poster of the comment, so we don't need to load it.
+ if comment.Type == CommentTypeDismissReview {
+ if err := comment.Review.LoadReviewer(ctx); err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+}
+
+// LoadAttributes loads the related attributes of the comments: posters, labels,
+// milestones, assignees, attachments, reviews, issues and dependent issues.
+func (comments CommentList) LoadAttributes(ctx context.Context) (err error) {
+ if err = comments.LoadPosters(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.loadLabels(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.loadMilestones(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.loadOldMilestones(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.loadAssignees(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.LoadAttachments(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.loadReviews(ctx); err != nil {
+ return err
+ }
+
+ if err = comments.LoadIssues(ctx); err != nil {
+ return err
+ }
+
+ return comments.loadDependentIssues(ctx)
+}
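+
+// Example (editor's sketch, not part of the original change): a typical caller
+// fetches the comments of an issue and then hydrates them in one pass before
+// rendering, assuming ctx and issue are in scope:
+//
+//	comments, err := FindComments(ctx, &FindCommentsOptions{
+//		IssueID: issue.ID,
+//		Type:    CommentTypeComment,
+//	})
+//	if err != nil {
+//		return err
+//	}
+//	if err := comments.LoadAttributes(ctx); err != nil {
+//		return err
+//	}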
diff --git a/models/issues/comment_list_test.go b/models/issues/comment_list_test.go
new file mode 100644
index 0000000..5ad1cd1
--- /dev/null
+++ b/models/issues/comment_list_test.go
@@ -0,0 +1,86 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCommentListLoadUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &Issue{})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+ for _, testCase := range []struct {
+ poster int64
+ assignee int64
+ user *user_model.User
+ }{
+ {
+ poster: user_model.ActionsUserID,
+ assignee: user_model.ActionsUserID,
+ user: user_model.NewActionsUser(),
+ },
+ {
+ poster: user_model.GhostUserID,
+ assignee: user_model.GhostUserID,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: doer.ID,
+ assignee: doer.ID,
+ user: doer,
+ },
+ {
+ poster: 0,
+ assignee: 0,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: -200,
+ assignee: -200,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: 200,
+ assignee: 200,
+ user: user_model.NewGhostUser(),
+ },
+ } {
+ t.Run(testCase.user.Name, func(t *testing.T) {
+ comment, err := CreateComment(db.DefaultContext, &CreateCommentOptions{
+ Type: CommentTypeComment,
+ Doer: testCase.user,
+ Repo: repo,
+ Issue: issue,
+ Content: "Hello",
+ })
+ assert.NoError(t, err)
+
+ list := CommentList{comment}
+
+ comment.PosterID = testCase.poster
+ comment.Poster = nil
+ assert.NoError(t, list.LoadPosters(db.DefaultContext))
+ require.NotNil(t, comment.Poster)
+ assert.Equal(t, testCase.user.ID, comment.Poster.ID)
+
+ comment.AssigneeID = testCase.assignee
+ comment.Assignee = nil
+ require.NoError(t, list.loadAssignees(db.DefaultContext))
+ require.NotNil(t, comment.Assignee)
+ assert.Equal(t, testCase.user.ID, comment.Assignee.ID)
+ })
+ }
+}
diff --git a/models/issues/comment_test.go b/models/issues/comment_test.go
new file mode 100644
index 0000000..f7088cc
--- /dev/null
+++ b/models/issues/comment_test.go
@@ -0,0 +1,127 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateComment(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+ now := time.Now().Unix()
+ comment, err := issues_model.CreateComment(db.DefaultContext, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeComment,
+ Doer: doer,
+ Repo: repo,
+ Issue: issue,
+ Content: "Hello",
+ })
+ require.NoError(t, err)
+ then := time.Now().Unix()
+
+ assert.EqualValues(t, issues_model.CommentTypeComment, comment.Type)
+ assert.EqualValues(t, "Hello", comment.Content)
+ assert.EqualValues(t, issue.ID, comment.IssueID)
+ assert.EqualValues(t, doer.ID, comment.PosterID)
+ unittest.AssertInt64InRange(t, now, then, int64(comment.CreatedUnix))
+ unittest.AssertExistsAndLoadBean(t, comment) // assert actually added to DB
+
+ updatedIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue.ID})
+ unittest.AssertInt64InRange(t, now, then, int64(updatedIssue.UpdatedUnix))
+}
+
+func TestFetchCodeConversations(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ res, err := issues_model.FetchCodeConversations(db.DefaultContext, issue, user, false)
+ require.NoError(t, err)
+ assert.Contains(t, res, "README.md")
+ assert.Contains(t, res["README.md"], int64(4))
+ assert.Len(t, res["README.md"][4], 1)
+ assert.Equal(t, int64(4), res["README.md"][4][0][0].ID)
+
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ res, err = issues_model.FetchCodeConversations(db.DefaultContext, issue, user2, false)
+ require.NoError(t, err)
+ assert.Len(t, res, 1)
+}
+
+func TestAsCommentType(t *testing.T) {
+ assert.Equal(t, issues_model.CommentTypeComment, issues_model.CommentType(0))
+ assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType(""))
+ assert.Equal(t, issues_model.CommentTypeUndefined, issues_model.AsCommentType("nonsense"))
+ assert.Equal(t, issues_model.CommentTypeComment, issues_model.AsCommentType("comment"))
+ assert.Equal(t, issues_model.CommentTypePRUnScheduledToAutoMerge, issues_model.AsCommentType("pull_cancel_scheduled_merge"))
+}
+
+func TestMigrate_InsertIssueComments(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ _ = issue.LoadRepo(db.DefaultContext)
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: issue.Repo.OwnerID})
+ reaction := &issues_model.Reaction{
+ Type: "heart",
+ UserID: owner.ID,
+ }
+
+ comment := &issues_model.Comment{
+ PosterID: owner.ID,
+ Poster: owner,
+ IssueID: issue.ID,
+ Issue: issue,
+ Reactions: []*issues_model.Reaction{reaction},
+ }
+
+ err := issues_model.InsertIssueComments(db.DefaultContext, []*issues_model.Comment{comment})
+ require.NoError(t, err)
+
+ issueModified := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ assert.EqualValues(t, issue.NumComments+1, issueModified.NumComments)
+
+ unittest.CheckConsistencyFor(t, &issues_model.Issue{})
+}
+
+func TestUpdateCommentsMigrationsByType(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 1, IssueID: issue.ID})
+
+ // Set repository to migrated from Gitea.
+ repo.OriginalServiceType = structs.GiteaService
+ repo_model.UpdateRepositoryCols(db.DefaultContext, repo, "original_service_type")
+
+ // Set comment to have an original author.
+ comment.OriginalAuthor = "Example User"
+ comment.OriginalAuthorID = 1
+ comment.PosterID = 0
+ _, err := db.GetEngine(db.DefaultContext).ID(comment.ID).Cols("original_author", "original_author_id", "poster_id").Update(comment)
+ require.NoError(t, err)
+
+ require.NoError(t, issues_model.UpdateCommentsMigrationsByType(db.DefaultContext, structs.GiteaService, "1", 513))
+
+ comment = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 1, IssueID: issue.ID})
+ assert.Empty(t, comment.OriginalAuthor)
+ assert.Empty(t, comment.OriginalAuthorID)
+ assert.EqualValues(t, 513, comment.PosterID)
+}
diff --git a/models/issues/content_history.go b/models/issues/content_history.go
new file mode 100644
index 0000000..cd3e217
--- /dev/null
+++ b/models/issues/content_history.go
@@ -0,0 +1,242 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/avatars"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ContentHistory saves issue/comment content history revisions.
+type ContentHistory struct {
+ ID int64 `xorm:"pk autoincr"`
+ PosterID int64
+ IssueID int64 `xorm:"INDEX"`
+ CommentID int64 `xorm:"INDEX"`
+ EditedUnix timeutil.TimeStamp `xorm:"INDEX"`
+ ContentText string `xorm:"LONGTEXT"`
+ IsFirstCreated bool
+ IsDeleted bool
+}
+
+// TableName provides the real table name
+func (m *ContentHistory) TableName() string {
+ return "issue_content_history"
+}
+
+func init() {
+ db.RegisterModel(new(ContentHistory))
+}
+
+// SaveIssueContentHistory saves a new content history revision
+func SaveIssueContentHistory(ctx context.Context, posterID, issueID, commentID int64, editTime timeutil.TimeStamp, contentText string, isFirstCreated bool) error {
+ ch := &ContentHistory{
+ PosterID: posterID,
+ IssueID: issueID,
+ CommentID: commentID,
+ ContentText: contentText,
+ EditedUnix: editTime,
+ IsFirstCreated: isFirstCreated,
+ }
+ if err := db.Insert(ctx, ch); err != nil {
+ log.Error("can not save issue content history. err=%v", err)
+ return err
+ }
+ // We only keep at most 20 history revisions now. It is enough in most cases.
+ // If there is a special requirement to keep more, we can consider introducing a new setting option then, but not now.
+ KeepLimitedContentHistory(ctx, issueID, commentID, 20)
+ return nil
+}
+
+// KeepLimitedContentHistory keeps at most `limit` history revisions; it hard-deletes out-dated revisions, choosing the ones with the smallest edit interval first.
+// Errors in this function are not fatal, so they are only logged.
+func KeepLimitedContentHistory(ctx context.Context, issueID, commentID int64, limit int) {
+ type IDEditTime struct {
+ ID int64
+ EditedUnix timeutil.TimeStamp
+ }
+
+ var res []*IDEditTime
+ err := db.GetEngine(ctx).Select("id, edited_unix").Table("issue_content_history").
+ Where(builder.Eq{"issue_id": issueID, "comment_id": commentID}).
+ OrderBy("edited_unix ASC").
+ Find(&res)
+ if err != nil {
+ log.Error("can not query content history for deletion, err=%v", err)
+ return
+ }
+ if len(res) <= 2 {
+ return
+ }
+
+ outDatedCount := len(res) - limit
+ for outDatedCount > 0 {
+ var indexToDelete int
+ minEditedInterval := -1
+ // find a history revision with minimal edited interval to delete, the first and the last should never be deleted
+ for i := 1; i < len(res)-1; i++ {
+ editedInterval := int(res[i].EditedUnix - res[i-1].EditedUnix)
+ if minEditedInterval == -1 || editedInterval < minEditedInterval {
+ minEditedInterval = editedInterval
+ indexToDelete = i
+ }
+ }
+ if indexToDelete == 0 {
+ break
+ }
+
+ // hard delete the found one
+ _, err = db.GetEngine(ctx).Delete(&ContentHistory{ID: res[indexToDelete].ID})
+ if err != nil {
+ log.Error("can not delete out-dated content history, err=%v", err)
+ break
+ }
+ res = append(res[:indexToDelete], res[indexToDelete+1:]...)
+ outDatedCount--
+ }
+}
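+
+// Worked example (editor's addition, mirrored by the tests below): with
+// revisions at t, t+5, t+20, t+50 and t+51 and a limit of 3, the revision with
+// the smallest gap to its predecessor is dropped first (t+5, then t+20), so
+// the surviving revisions are t, t+50 and t+51; the first and last revisions
+// are never deleted.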
+
+// QueryIssueContentHistoryEditedCountMap queries the history revision count of each comment (comment_id = 0 means the main issue).
+// It only returns counts for "edited" issues or comments (history revision count > 1).
+func QueryIssueContentHistoryEditedCountMap(dbCtx context.Context, issueID int64) (map[int64]int, error) {
+ type HistoryCountRecord struct {
+ CommentID int64
+ HistoryCount int
+ }
+ records := make([]*HistoryCountRecord, 0)
+
+ err := db.GetEngine(dbCtx).Select("comment_id, COUNT(1) as history_count").
+ Table("issue_content_history").
+ Where(builder.Eq{"issue_id": issueID}).
+ GroupBy("comment_id").
+ Having("count(1) > 1").
+ Find(&records)
+ if err != nil {
+ log.Error("can not query issue content history count map. err=%v", err)
+ return nil, err
+ }
+
+ res := map[int64]int{}
+ for _, r := range records {
+ res[r.CommentID] = r.HistoryCount
+ }
+ return res, nil
+}
+
+// IssueContentListItem is a single list item for the web UI
+type IssueContentListItem struct {
+ UserID int64
+ UserName string
+ UserFullName string
+ UserAvatarLink string
+
+ HistoryID int64
+ EditedUnix timeutil.TimeStamp
+ IsFirstCreated bool
+ IsDeleted bool
+}
+
+// FetchIssueContentHistoryList fetches the content history list of an issue or comment
+func FetchIssueContentHistoryList(dbCtx context.Context, issueID, commentID int64) ([]*IssueContentListItem, error) {
+ res := make([]*IssueContentListItem, 0)
+ err := db.GetEngine(dbCtx).Select("u.id as user_id, u.name as user_name, u.full_name as user_full_name,"+
+ "h.id as history_id, h.edited_unix, h.is_first_created, h.is_deleted").
+ Table([]string{"issue_content_history", "h"}).
+ Join("LEFT", []string{"user", "u"}, "h.poster_id = u.id").
+ Where(builder.Eq{"issue_id": issueID, "comment_id": commentID}).
+ OrderBy("edited_unix DESC").
+ Find(&res)
+ if err != nil {
+ log.Error("can not fetch issue content history list. err=%v", err)
+ return nil, err
+ }
+
+ for _, item := range res {
+ if item.UserID > 0 {
+ item.UserAvatarLink = avatars.GenerateUserAvatarFastLink(item.UserName, 0)
+ } else {
+ item.UserAvatarLink = avatars.DefaultAvatarLink()
+ }
+ }
+ return res, nil
+}
+
+// HasIssueContentHistory checks whether a ContentHistory entry exists
+func HasIssueContentHistory(dbCtx context.Context, issueID, commentID int64) (bool, error) {
+ return db.GetEngine(dbCtx).Where("issue_id = ? AND comment_id = ?", issueID, commentID).Exist(new(ContentHistory))
+}
+
+// SoftDeleteIssueContentHistory soft-deletes a content history revision and clears its content
+func SoftDeleteIssueContentHistory(dbCtx context.Context, historyID int64) error {
+ if _, err := db.GetEngine(dbCtx).ID(historyID).Cols("is_deleted", "content_text").Update(&ContentHistory{
+ IsDeleted: true,
+ ContentText: "",
+ }); err != nil {
+ log.Error("failed to soft delete issue content history. err=%v", err)
+ return err
+ }
+ return nil
+}
+
+// ErrIssueContentHistoryNotExist is returned when a content history entry does not exist
+type ErrIssueContentHistoryNotExist struct {
+ ID int64
+}
+
+// Error error string
+func (err ErrIssueContentHistoryNotExist) Error() string {
+ return fmt.Sprintf("issue content history does not exist [id: %d]", err.ID)
+}
+
+func (err ErrIssueContentHistoryNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// GetIssueContentHistoryByID gets an issue content history entry by its ID
+func GetIssueContentHistoryByID(dbCtx context.Context, id int64) (*ContentHistory, error) {
+ h := &ContentHistory{}
+ has, err := db.GetEngine(dbCtx).ID(id).Get(h)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrIssueContentHistoryNotExist{id}
+ }
+ return h, nil
+}
+
+// GetIssueContentHistoryAndPrev gets a history entry and the previous non-deleted entry (for comparison)
+func GetIssueContentHistoryAndPrev(dbCtx context.Context, issueID, id int64) (history, prevHistory *ContentHistory, err error) {
+ history = &ContentHistory{}
+ has, err := db.GetEngine(dbCtx).Where("id=? AND issue_id=?", id, issueID).Get(history)
+ if err != nil {
+ log.Error("failed to get issue content history %v. err=%v", id, err)
+ return nil, nil, err
+ } else if !has {
+ log.Error("issue content history does not exist. id=%v. err=%v", id, err)
+ return nil, nil, &ErrIssueContentHistoryNotExist{id}
+ }
+
+ prevHistory = &ContentHistory{}
+ has, err = db.GetEngine(dbCtx).Where(builder.Eq{"issue_id": history.IssueID, "comment_id": history.CommentID, "is_deleted": false}).
+ And(builder.Lt{"edited_unix": history.EditedUnix}).
+ OrderBy("edited_unix DESC").Limit(1).
+ Get(prevHistory)
+
+ if err != nil {
+ log.Error("failed to get issue content history %v. err=%v", id, err)
+ return nil, nil, err
+ } else if !has {
+ return history, nil, nil
+ }
+
+ return history, prevHistory, nil
+}
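+
+// Example (editor's sketch, not part of the original change): a "compare two
+// revisions" view can be driven directly by this helper, assuming ctx,
+// issueID and historyID are in scope:
+//
+//	history, prev, err := GetIssueContentHistoryAndPrev(ctx, issueID, historyID)
+//	if err != nil {
+//		return err
+//	}
+//	if prev == nil {
+//		// historyID is the oldest surviving revision, nothing to diff against
+//	}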
diff --git a/models/issues/content_history_test.go b/models/issues/content_history_test.go
new file mode 100644
index 0000000..dde6f19
--- /dev/null
+++ b/models/issues/content_history_test.go
@@ -0,0 +1,94 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestContentHistory(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ dbCtx := db.DefaultContext
+ timeStampNow := timeutil.TimeStampNow()
+
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 0, timeStampNow, "i-a", true)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 0, timeStampNow.Add(2), "i-b", false)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 0, timeStampNow.Add(7), "i-c", false)
+
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 100, timeStampNow, "c-a", true)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 100, timeStampNow.Add(5), "c-b", false)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 100, timeStampNow.Add(20), "c-c", false)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 100, timeStampNow.Add(50), "c-d", false)
+ _ = issues_model.SaveIssueContentHistory(dbCtx, 1, 10, 100, timeStampNow.Add(51), "c-e", false)
+
+ h1, _ := issues_model.GetIssueContentHistoryByID(dbCtx, 1)
+ assert.EqualValues(t, 1, h1.ID)
+
+ m, _ := issues_model.QueryIssueContentHistoryEditedCountMap(dbCtx, 10)
+ assert.Equal(t, 3, m[0])
+ assert.Equal(t, 5, m[100])
+
+ /*
+ we cannot run this test with a real `User` yet, because this package cannot depend on the `User` model (circular import), so there is no `user` table here.
+ once the model refactor is done, this test can be run with a real `User` model.
+ */
+ type User struct {
+ ID int64
+ Name string
+ FullName string
+ }
+ _ = db.GetEngine(dbCtx).Sync(&User{})
+
+ list1, _ := issues_model.FetchIssueContentHistoryList(dbCtx, 10, 0)
+ assert.Len(t, list1, 3)
+ list2, _ := issues_model.FetchIssueContentHistoryList(dbCtx, 10, 100)
+ assert.Len(t, list2, 5)
+
+ hasHistory1, _ := issues_model.HasIssueContentHistory(dbCtx, 10, 0)
+ assert.True(t, hasHistory1)
+ hasHistory2, _ := issues_model.HasIssueContentHistory(dbCtx, 10, 1)
+ assert.False(t, hasHistory2)
+
+ h6, h6Prev, _ := issues_model.GetIssueContentHistoryAndPrev(dbCtx, 10, 6)
+ assert.EqualValues(t, 6, h6.ID)
+ assert.EqualValues(t, 5, h6Prev.ID)
+
+ // soft-delete
+ _ = issues_model.SoftDeleteIssueContentHistory(dbCtx, 5)
+ h6, h6Prev, _ = issues_model.GetIssueContentHistoryAndPrev(dbCtx, 10, 6)
+ assert.EqualValues(t, 6, h6.ID)
+ assert.EqualValues(t, 4, h6Prev.ID)
+
+ // only keep 3 history revisions for comment_id=100, the first and the last should never be deleted
+ issues_model.KeepLimitedContentHistory(dbCtx, 10, 100, 3)
+ list1, _ = issues_model.FetchIssueContentHistoryList(dbCtx, 10, 0)
+ assert.Len(t, list1, 3)
+ list2, _ = issues_model.FetchIssueContentHistoryList(dbCtx, 10, 100)
+ assert.Len(t, list2, 3)
+ assert.EqualValues(t, 8, list2[0].HistoryID)
+ assert.EqualValues(t, 7, list2[1].HistoryID)
+ assert.EqualValues(t, 4, list2[2].HistoryID)
+}
+
+func TestHasIssueContentHistory(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Ensures that comment_id is taken into account even if it's zero.
+ _ = issues_model.SaveIssueContentHistory(db.DefaultContext, 1, 11, 100, timeutil.TimeStampNow(), "c-a", true)
+ _ = issues_model.SaveIssueContentHistory(db.DefaultContext, 1, 11, 100, timeutil.TimeStampNow().Add(5), "c-b", false)
+
+ hasHistory1, _ := issues_model.HasIssueContentHistory(db.DefaultContext, 11, 0)
+ assert.False(t, hasHistory1)
+ hasHistory2, _ := issues_model.HasIssueContentHistory(db.DefaultContext, 11, 100)
+ assert.True(t, hasHistory2)
+}
diff --git a/models/issues/dependency.go b/models/issues/dependency.go
new file mode 100644
index 0000000..146dd18
--- /dev/null
+++ b/models/issues/dependency.go
@@ -0,0 +1,222 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// ErrDependencyExists represents a "DependencyAlreadyExists" kind of error.
+type ErrDependencyExists struct {
+ IssueID int64
+ DependencyID int64
+}
+
+// IsErrDependencyExists checks if an error is a ErrDependencyExists.
+func IsErrDependencyExists(err error) bool {
+ _, ok := err.(ErrDependencyExists)
+ return ok
+}
+
+func (err ErrDependencyExists) Error() string {
+ return fmt.Sprintf("issue dependency does already exist [issue id: %d, dependency id: %d]", err.IssueID, err.DependencyID)
+}
+
+func (err ErrDependencyExists) Unwrap() error {
+ return util.ErrAlreadyExist
+}
+
+// ErrDependencyNotExists represents a "DependencyNotExists" kind of error.
+type ErrDependencyNotExists struct {
+ IssueID int64
+ DependencyID int64
+}
+
+// IsErrDependencyNotExists checks if an error is an ErrDependencyNotExists.
+func IsErrDependencyNotExists(err error) bool {
+ _, ok := err.(ErrDependencyNotExists)
+ return ok
+}
+
+func (err ErrDependencyNotExists) Error() string {
+ return fmt.Sprintf("issue dependency does not exist [issue id: %d, dependency id: %d]", err.IssueID, err.DependencyID)
+}
+
+func (err ErrDependencyNotExists) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrCircularDependency represents a "DependencyCircular" kind of error.
+type ErrCircularDependency struct {
+ IssueID int64
+ DependencyID int64
+}
+
+// IsErrCircularDependency checks if an error is a ErrCircularDependency.
+func IsErrCircularDependency(err error) bool {
+ _, ok := err.(ErrCircularDependency)
+ return ok
+}
+
+func (err ErrCircularDependency) Error() string {
+ return fmt.Sprintf("circular dependencies exists (two issues blocking each other) [issue id: %d, dependency id: %d]", err.IssueID, err.DependencyID)
+}
+
+// ErrDependenciesLeft represents an error where the issue you're trying to close still has dependencies left.
+type ErrDependenciesLeft struct {
+ IssueID int64
+}
+
+// IsErrDependenciesLeft checks if an error is a ErrDependenciesLeft.
+func IsErrDependenciesLeft(err error) bool {
+ _, ok := err.(ErrDependenciesLeft)
+ return ok
+}
+
+func (err ErrDependenciesLeft) Error() string {
+ return fmt.Sprintf("issue has open dependencies [issue id: %d]", err.IssueID)
+}
+
+// ErrUnknownDependencyType represents an error where an unknown dependency type was passed
+type ErrUnknownDependencyType struct {
+ Type DependencyType
+}
+
+// IsErrUnknownDependencyType checks if an error is ErrUnknownDependencyType
+func IsErrUnknownDependencyType(err error) bool {
+ _, ok := err.(ErrUnknownDependencyType)
+ return ok
+}
+
+func (err ErrUnknownDependencyType) Error() string {
+ return fmt.Sprintf("unknown dependency type [type: %d]", err.Type)
+}
+
+func (err ErrUnknownDependencyType) Unwrap() error {
+ return util.ErrInvalidArgument
+}
+
+// IssueDependency represents an issue dependency
+type IssueDependency struct {
+ ID int64 `xorm:"pk autoincr"`
+ UserID int64 `xorm:"NOT NULL"`
+ IssueID int64 `xorm:"UNIQUE(issue_dependency) NOT NULL"`
+ DependencyID int64 `xorm:"UNIQUE(issue_dependency) NOT NULL"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"updated"`
+}
+
+func init() {
+ db.RegisterModel(new(IssueDependency))
+}
+
+// DependencyType defines the dependency type constants
+type DependencyType int
+
+// Define Dependency Types
+const (
+ DependencyTypeBlockedBy DependencyType = iota
+ DependencyTypeBlocking
+)
+
+// CreateIssueDependency creates a new dependency for an issue
+func CreateIssueDependency(ctx context.Context, user *user_model.User, issue, dep *Issue) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ // Check if it already exists
+ exists, err := issueDepExists(ctx, issue.ID, dep.ID)
+ if err != nil {
+ return err
+ }
+ if exists {
+ return ErrDependencyExists{issue.ID, dep.ID}
+ }
+ // And if it would be circular
+ circular, err := issueDepExists(ctx, dep.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+ if circular {
+ return ErrCircularDependency{issue.ID, dep.ID}
+ }
+
+ if err := db.Insert(ctx, &IssueDependency{
+ UserID: user.ID,
+ IssueID: issue.ID,
+ DependencyID: dep.ID,
+ }); err != nil {
+ return err
+ }
+
+ // Add comment referencing the new dependency
+ if err = createIssueDependencyComment(ctx, user, issue, dep, true); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
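+
+// Example (editor's sketch, not part of the original change): callers are
+// expected to branch on the sentinel errors above, assuming ctx, doer, issue
+// and dep are in scope:
+//
+//	if err := CreateIssueDependency(ctx, doer, issue, dep); err != nil {
+//		switch {
+//		case IsErrDependencyExists(err):
+//			// the dependency is already recorded, nothing to do
+//		case IsErrCircularDependency(err):
+//			// dep already depends on issue, refuse to create a cycle
+//		default:
+//			return err
+//		}
+//	}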
+
+// RemoveIssueDependency removes a dependency from an issue
+func RemoveIssueDependency(ctx context.Context, user *user_model.User, issue, dep *Issue, depType DependencyType) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ var issueDepToDelete IssueDependency
+
+ switch depType {
+ case DependencyTypeBlockedBy:
+ issueDepToDelete = IssueDependency{IssueID: issue.ID, DependencyID: dep.ID}
+ case DependencyTypeBlocking:
+ issueDepToDelete = IssueDependency{IssueID: dep.ID, DependencyID: issue.ID}
+ default:
+ return ErrUnknownDependencyType{depType}
+ }
+
+ affected, err := db.GetEngine(ctx).Delete(&issueDepToDelete)
+ if err != nil {
+ return err
+ }
+
+ // If we deleted nothing, the dependency did not exist
+ if affected <= 0 {
+ return ErrDependencyNotExists{issue.ID, dep.ID}
+ }
+
+ // Add comment referencing the removed dependency
+ if err = createIssueDependencyComment(ctx, user, issue, dep, false); err != nil {
+ return err
+ }
+ return committer.Commit()
+}
+
+// Check if the dependency already exists
+func issueDepExists(ctx context.Context, issueID, depID int64) (bool, error) {
+ return db.GetEngine(ctx).Where("(issue_id = ? AND dependency_id = ?)", issueID, depID).Exist(&IssueDependency{})
+}
+
+// IssueNoDependenciesLeft checks if an issue has no open dependencies left (and can therefore be closed)
+func IssueNoDependenciesLeft(ctx context.Context, issue *Issue) (bool, error) {
+ exists, err := db.GetEngine(ctx).
+ Table("issue_dependency").
+ Select("issue.*").
+ Join("INNER", "issue", "issue.id = issue_dependency.dependency_id").
+ Where("issue_dependency.issue_id = ?", issue.ID).
+ And("issue.is_closed = ?", "0").
+ Exist(&Issue{})
+
+ return !exists, err
+}
diff --git a/models/issues/dependency_test.go b/models/issues/dependency_test.go
new file mode 100644
index 0000000..1e73c58
--- /dev/null
+++ b/models/issues/dependency_test.go
@@ -0,0 +1,63 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateIssueDependency(t *testing.T) {
+ // Prepare
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1, err := user_model.GetUserByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+
+ // Create a dependency and check if it was successful
+ err = issues_model.CreateIssueDependency(db.DefaultContext, user1, issue1, issue2)
+ require.NoError(t, err)
+
+ // Do it again to see if it will check if the dependency already exists
+ err = issues_model.CreateIssueDependency(db.DefaultContext, user1, issue1, issue2)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrDependencyExists(err))
+
+ // Check for circular dependencies
+ err = issues_model.CreateIssueDependency(db.DefaultContext, user1, issue2, issue1)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrCircularDependency(err))
+
+ _ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeAddDependency, PosterID: user1.ID, IssueID: issue1.ID})
+
+ // Check if dependencies left is correct
+ left, err := issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1)
+ require.NoError(t, err)
+ assert.False(t, left)
+
+ // Close #2 and check again
+ _, err = issues_model.ChangeIssueStatus(db.DefaultContext, issue2, user1, true)
+ require.NoError(t, err)
+
+ left, err = issues_model.IssueNoDependenciesLeft(db.DefaultContext, issue1)
+ require.NoError(t, err)
+ assert.True(t, left)
+
+ // Test removing the dependency
+ err = issues_model.RemoveIssueDependency(db.DefaultContext, user1, issue1, issue2, issues_model.DependencyTypeBlockedBy)
+ require.NoError(t, err)
+}
diff --git a/models/issues/issue.go b/models/issues/issue.go
new file mode 100644
index 0000000..f7379b7
--- /dev/null
+++ b/models/issues/issue.go
@@ -0,0 +1,939 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "regexp"
+ "slices"
+
+ "code.gitea.io/gitea/models/db"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrIssueNotExist represents a "IssueNotExist" kind of error.
+type ErrIssueNotExist struct {
+ ID int64
+ RepoID int64
+ Index int64
+}
+
+// IsErrIssueNotExist checks if an error is a ErrIssueNotExist.
+func IsErrIssueNotExist(err error) bool {
+ _, ok := err.(ErrIssueNotExist)
+ return ok
+}
+
+func (err ErrIssueNotExist) Error() string {
+ return fmt.Sprintf("issue does not exist [id: %d, repo_id: %d, index: %d]", err.ID, err.RepoID, err.Index)
+}
+
+func (err ErrIssueNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrIssueIsClosed represents a "IssueIsClosed" kind of error.
+type ErrIssueIsClosed struct {
+ ID int64
+ RepoID int64
+ Index int64
+}
+
+// IsErrIssueIsClosed checks if an error is an ErrIssueIsClosed.
+func IsErrIssueIsClosed(err error) bool {
+ _, ok := err.(ErrIssueIsClosed)
+ return ok
+}
+
+func (err ErrIssueIsClosed) Error() string {
+ return fmt.Sprintf("issue is closed [id: %d, repo_id: %d, index: %d]", err.ID, err.RepoID, err.Index)
+}
+
+// ErrNewIssueInsert is used when the INSERT statement in newIssue fails
+type ErrNewIssueInsert struct {
+ OriginalError error
+}
+
+// IsErrNewIssueInsert checks if an error is a ErrNewIssueInsert.
+func IsErrNewIssueInsert(err error) bool {
+ _, ok := err.(ErrNewIssueInsert)
+ return ok
+}
+
+func (err ErrNewIssueInsert) Error() string {
+ return err.OriginalError.Error()
+}
+
+// ErrIssueWasClosed is used when closing an issue that is already closed
+type ErrIssueWasClosed struct {
+ ID int64
+ Index int64
+}
+
+// IsErrIssueWasClosed checks if an error is a ErrIssueWasClosed.
+func IsErrIssueWasClosed(err error) bool {
+ _, ok := err.(ErrIssueWasClosed)
+ return ok
+}
+
+func (err ErrIssueWasClosed) Error() string {
+ return fmt.Sprintf("Issue [%d] %d was already closed", err.ID, err.Index)
+}
+
+var ErrIssueAlreadyChanged = util.NewInvalidArgumentErrorf("the issue is already changed")
+
+// Issue represents an issue or pull request of repository.
+type Issue struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX UNIQUE(repo_index)"`
+ Repo *repo_model.Repository `xorm:"-"`
+ Index int64 `xorm:"UNIQUE(repo_index)"` // Index in one repository.
+ PosterID int64 `xorm:"INDEX"`
+ Poster *user_model.User `xorm:"-"`
+ OriginalAuthor string
+ OriginalAuthorID int64 `xorm:"index"`
+ Title string `xorm:"name"`
+ Content string `xorm:"LONGTEXT"`
+ RenderedContent template.HTML `xorm:"-"`
+ ContentVersion int `xorm:"NOT NULL DEFAULT 0"`
+ Labels []*Label `xorm:"-"`
+ isLabelsLoaded bool `xorm:"-"`
+ MilestoneID int64 `xorm:"INDEX"`
+ Milestone *Milestone `xorm:"-"`
+ isMilestoneLoaded bool `xorm:"-"`
+ Project *project_model.Project `xorm:"-"`
+ Priority int
+ AssigneeID int64 `xorm:"-"`
+ Assignee *user_model.User `xorm:"-"`
+ isAssigneeLoaded bool `xorm:"-"`
+ IsClosed bool `xorm:"INDEX"`
+ IsRead bool `xorm:"-"`
+ IsPull bool `xorm:"INDEX"` // Indicates whether this is a pull request.
+ PullRequest *PullRequest `xorm:"-"`
+ NumComments int
+ Ref string
+ PinOrder int `xorm:"DEFAULT 0"`
+
+ DeadlineUnix timeutil.TimeStamp `xorm:"INDEX"`
+
+ Created timeutil.TimeStampNano
+
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+ ClosedUnix timeutil.TimeStamp `xorm:"INDEX"`
+ NoAutoTime bool `xorm:"-"`
+
+ Attachments []*repo_model.Attachment `xorm:"-"`
+ isAttachmentsLoaded bool `xorm:"-"`
+ Comments CommentList `xorm:"-"`
+ Reactions ReactionList `xorm:"-"`
+ TotalTrackedTime int64 `xorm:"-"`
+ Assignees []*user_model.User `xorm:"-"`
+
+ // IsLocked limits commenting abilities to users on an issue
+ // with write access
+ IsLocked bool `xorm:"NOT NULL DEFAULT false"`
+
+ // For view issue page.
+ ShowRole RoleDescriptor `xorm:"-"`
+}
+
+var (
+ issueTasksPat = regexp.MustCompile(`(^|\n)\s*[-*]\s*\[[\sxX]\]`)
+ issueTasksDonePat = regexp.MustCompile(`(^|\n)\s*[-*]\s*\[[xX]\]`)
+)
+
+// IssueIndex represents the issue index table
+type IssueIndex db.ResourceIndex
+
+func init() {
+ db.RegisterModel(new(Issue))
+ db.RegisterModel(new(IssueIndex))
+}
+
+// LoadTotalTimes loads the total tracked time of the issue
+func (issue *Issue) LoadTotalTimes(ctx context.Context) (err error) {
+ opts := FindTrackedTimesOptions{IssueID: issue.ID}
+ issue.TotalTrackedTime, err = opts.toSession(db.GetEngine(ctx)).SumInt(&TrackedTime{}, "time")
+ if err != nil {
+ return err
+ }
+ return nil
+}
+
+// IsOverdue checks if the issue is overdue
+func (issue *Issue) IsOverdue() bool {
+ if issue.IsClosed {
+ return issue.ClosedUnix >= issue.DeadlineUnix
+ }
+ return timeutil.TimeStampNow() >= issue.DeadlineUnix
+}
+
+// LoadRepo loads issue's repository
+func (issue *Issue) LoadRepo(ctx context.Context) (err error) {
+ if issue.Repo == nil && issue.RepoID != 0 {
+ issue.Repo, err = repo_model.GetRepositoryByID(ctx, issue.RepoID)
+ if err != nil {
+ return fmt.Errorf("getRepositoryByID [%d]: %w", issue.RepoID, err)
+ }
+ }
+ return nil
+}
+
+func (issue *Issue) LoadAttachments(ctx context.Context) (err error) {
+ if issue.isAttachmentsLoaded || issue.Attachments != nil {
+ return nil
+ }
+
+ issue.Attachments, err = repo_model.GetAttachmentsByIssueID(ctx, issue.ID)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByIssueID [%d]: %w", issue.ID, err)
+ }
+ issue.isAttachmentsLoaded = true
+ return nil
+}
+
+// IsTimetrackerEnabled returns true if the repo enables timetracking
+func (issue *Issue) IsTimetrackerEnabled(ctx context.Context) bool {
+ if err := issue.LoadRepo(ctx); err != nil {
+ log.Error(fmt.Sprintf("loadRepo: %v", err))
+ return false
+ }
+ return issue.Repo.IsTimetrackerEnabled(ctx)
+}
+
+// LoadPoster loads poster
+func (issue *Issue) LoadPoster(ctx context.Context) (err error) {
+ if issue.Poster == nil && issue.PosterID != 0 {
+ issue.Poster, err = user_model.GetPossibleUserByID(ctx, issue.PosterID)
+ if err != nil {
+ issue.PosterID = user_model.GhostUserID
+ issue.Poster = user_model.NewGhostUser()
+ if !user_model.IsErrUserNotExist(err) {
+ return fmt.Errorf("getUserByID.(poster) [%d]: %w", issue.PosterID, err)
+ }
+ return nil
+ }
+ }
+ return err
+}
+
+// LoadPullRequest loads pull request info
+func (issue *Issue) LoadPullRequest(ctx context.Context) (err error) {
+ if issue.IsPull {
+ if issue.PullRequest == nil && issue.ID != 0 {
+ issue.PullRequest, err = GetPullRequestByIssueID(ctx, issue.ID)
+ if err != nil {
+ if IsErrPullRequestNotExist(err) {
+ return err
+ }
+ return fmt.Errorf("getPullRequestByIssueID [%d]: %w", issue.ID, err)
+ }
+ }
+ if issue.PullRequest != nil {
+ issue.PullRequest.Issue = issue
+ }
+ }
+ return nil
+}
+
+func (issue *Issue) loadComments(ctx context.Context) (err error) {
+ return issue.loadCommentsByType(ctx, CommentTypeUndefined)
+}
+
+// LoadDiscussComments loads discuss comments
+func (issue *Issue) LoadDiscussComments(ctx context.Context) error {
+ return issue.loadCommentsByType(ctx, CommentTypeComment)
+}
+
+func (issue *Issue) loadCommentsByType(ctx context.Context, tp CommentType) (err error) {
+ if issue.Comments != nil {
+ return nil
+ }
+ issue.Comments, err = FindComments(ctx, &FindCommentsOptions{
+ IssueID: issue.ID,
+ Type: tp,
+ })
+ return err
+}
+
+func (issue *Issue) loadReactions(ctx context.Context) (err error) {
+ if issue.Reactions != nil {
+ return nil
+ }
+ reactions, _, err := FindReactions(ctx, FindReactionsOptions{
+ IssueID: issue.ID,
+ })
+ if err != nil {
+ return err
+ }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+ // Load reaction user data
+ if _, err := reactions.LoadUsers(ctx, issue.Repo); err != nil {
+ return err
+ }
+
+ // Cache comments to map
+ comments := make(map[int64]*Comment)
+ for _, comment := range issue.Comments {
+ comments[comment.ID] = comment
+ }
+ // Add reactions either to issue or comment
+ for _, react := range reactions {
+ if react.CommentID == 0 {
+ issue.Reactions = append(issue.Reactions, react)
+ } else if comment, ok := comments[react.CommentID]; ok {
+ comment.Reactions = append(comment.Reactions, react)
+ }
+ }
+ return nil
+}
+
+// LoadMilestone loads the milestone of this issue.
+func (issue *Issue) LoadMilestone(ctx context.Context) (err error) {
+ if !issue.isMilestoneLoaded && (issue.Milestone == nil || issue.Milestone.ID != issue.MilestoneID) && issue.MilestoneID > 0 {
+ issue.Milestone, err = GetMilestoneByRepoID(ctx, issue.RepoID, issue.MilestoneID)
+ if err != nil && !IsErrMilestoneNotExist(err) {
+ return fmt.Errorf("getMilestoneByRepoID [repo_id: %d, milestone_id: %d]: %w", issue.RepoID, issue.MilestoneID, err)
+ }
+ issue.isMilestoneLoaded = true
+ }
+ return nil
+}
+
+// LoadAttributes loads the attributes of this issue.
+func (issue *Issue) LoadAttributes(ctx context.Context) (err error) {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadPoster(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadMilestone(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadProject(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadAssignees(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadPullRequest(ctx); err != nil && !IsErrPullRequestNotExist(err) {
+ // It is possible pull request is not yet created.
+ return err
+ }
+
+ if err = issue.LoadAttachments(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.loadComments(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.Comments.LoadAttributes(ctx); err != nil {
+ return err
+ }
+ if issue.IsTimetrackerEnabled(ctx) {
+ if err = issue.LoadTotalTimes(ctx); err != nil {
+ return err
+ }
+ }
+
+ return issue.loadReactions(ctx)
+}
+
+func (issue *Issue) ResetAttributesLoaded() {
+ issue.isLabelsLoaded = false
+ issue.isMilestoneLoaded = false
+ issue.isAttachmentsLoaded = false
+ issue.isAssigneeLoaded = false
+}
+
+// GetIsRead loads the `IsRead` field of the issue
+func (issue *Issue) GetIsRead(ctx context.Context, userID int64) error {
+ issueUser := &IssueUser{IssueID: issue.ID, UID: userID}
+ if has, err := db.GetEngine(ctx).Get(issueUser); err != nil {
+ return err
+ } else if !has {
+ issue.IsRead = false
+ return nil
+ }
+ issue.IsRead = issueUser.IsRead
+ return nil
+}
+
+// APIURL returns the absolute API URL of this issue.
+func (issue *Issue) APIURL(ctx context.Context) string {
+ if issue.Repo == nil {
+ err := issue.LoadRepo(ctx)
+ if err != nil {
+ log.Error("Issue[%d].APIURL(): %v", issue.ID, err)
+ return ""
+ }
+ }
+ return fmt.Sprintf("%s/issues/%d", issue.Repo.APIURL(), issue.Index)
+}
+
+// HTMLURL returns the absolute URL to this issue.
+func (issue *Issue) HTMLURL() string {
+ var path string
+ if issue.IsPull {
+ path = "pulls"
+ } else {
+ path = "issues"
+ }
+ return fmt.Sprintf("%s/%s/%d", issue.Repo.HTMLURL(), path, issue.Index)
+}
+
+// Link returns the issue's relative URL.
+func (issue *Issue) Link() string {
+ var path string
+ if issue.IsPull {
+ path = "pulls"
+ } else {
+ path = "issues"
+ }
+ return fmt.Sprintf("%s/%s/%d", issue.Repo.Link(), path, issue.Index)
+}
+
+// DiffURL returns the absolute URL to this diff
+func (issue *Issue) DiffURL() string {
+ if issue.IsPull {
+ return fmt.Sprintf("%s/pulls/%d.diff", issue.Repo.HTMLURL(), issue.Index)
+ }
+ return ""
+}
+
+// PatchURL returns the absolute URL to this patch
+func (issue *Issue) PatchURL() string {
+ if issue.IsPull {
+ return fmt.Sprintf("%s/pulls/%d.patch", issue.Repo.HTMLURL(), issue.Index)
+ }
+ return ""
+}
+
+// State returns string representation of issue status.
+func (issue *Issue) State() api.StateType {
+ if issue.IsClosed {
+ return api.StateClosed
+ }
+ return api.StateOpen
+}
+
+// HashTag returns unique hash tag for issue.
+func (issue *Issue) HashTag() string {
+ return fmt.Sprintf("issue-%d", issue.ID)
+}
+
+// IsPoster returns true if given user by ID is the poster.
+func (issue *Issue) IsPoster(uid int64) bool {
+ return issue.OriginalAuthorID == 0 && issue.PosterID == uid
+}
+
+// GetTasks returns the number of tasks in the issue's content
+func (issue *Issue) GetTasks() int {
+ return len(issueTasksPat.FindAllStringIndex(issue.Content, -1))
+}
+
+// GetTasksDone returns the number of completed tasks in the issue's content
+func (issue *Issue) GetTasksDone() int {
+ return len(issueTasksDonePat.FindAllStringIndex(issue.Content, -1))
+}
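+
+// Example (editor's addition): for Content "- [ ] a\n- [x] b\n* [X] c",
+// GetTasks returns 3 and GetTasksDone returns 2, since the patterns above
+// accept both "-" and "*" bullets and treat "x" or "X" as done.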
+
+// GetLastEventTimestamp returns the last user-visible event timestamp, either the creation or the closing of this issue.
+func (issue *Issue) GetLastEventTimestamp() timeutil.TimeStamp {
+ if issue.IsClosed {
+ return issue.ClosedUnix
+ }
+ return issue.CreatedUnix
+}
+
+// GetLastEventLabel returns the localization label for the current issue.
+func (issue *Issue) GetLastEventLabel() string {
+ if issue.IsClosed {
+ if issue.IsPull && issue.PullRequest.HasMerged {
+ return "repo.pulls.merged_by"
+ }
+ return "repo.issues.closed_by"
+ }
+ return "repo.issues.opened_by"
+}
+
+// GetLastComment returns the last comment of the current issue.
+func (issue *Issue) GetLastComment(ctx context.Context) (*Comment, error) {
+ var c Comment
+ exist, err := db.GetEngine(ctx).Where("type = ?", CommentTypeComment).
+ And("issue_id = ?", issue.ID).Desc("created_unix").Get(&c)
+ if err != nil {
+ return nil, err
+ }
+ if !exist {
+ return nil, nil
+ }
+ return &c, nil
+}
+
+// GetLastEventLabelFake returns the localization label for the current issue without providing a link in the username.
+func (issue *Issue) GetLastEventLabelFake() string {
+ if issue.IsClosed {
+ if issue.IsPull && issue.PullRequest.HasMerged {
+ return "repo.pulls.merged_by_fake"
+ }
+ return "repo.issues.closed_by_fake"
+ }
+ return "repo.issues.opened_by_fake"
+}
+
+// GetIssueByIndex returns the raw issue (without loading attributes) by its index in a repository.
+func GetIssueByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
+ if index < 1 {
+ return nil, ErrIssueNotExist{0, repoID, index}
+ }
+ issue := &Issue{
+ RepoID: repoID,
+ Index: index,
+ }
+ has, err := db.GetEngine(ctx).Get(issue)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrIssueNotExist{0, repoID, index}
+ }
+ return issue, nil
+}
+
+// GetIssueWithAttrsByIndex returns an issue with its attributes loaded by index in a repository.
+func GetIssueWithAttrsByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
+ issue, err := GetIssueByIndex(ctx, repoID, index)
+ if err != nil {
+ return nil, err
+ }
+ return issue, issue.LoadAttributes(ctx)
+}
+
+// GetIssueByID returns an issue by given ID.
+func GetIssueByID(ctx context.Context, id int64) (*Issue, error) {
+ issue := new(Issue)
+ has, err := db.GetEngine(ctx).ID(id).Get(issue)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrIssueNotExist{id, 0, 0}
+ }
+ return issue, nil
+}
+
+// GetIssuesByIDs return issues with the given IDs.
+// If keepOrder is true, the order of the returned issues will be the same as the given IDs.
+func GetIssuesByIDs(ctx context.Context, issueIDs []int64, keepOrder ...bool) (IssueList, error) {
+ issues := make([]*Issue, 0, len(issueIDs))
+
+ if err := db.GetEngine(ctx).In("id", issueIDs).Find(&issues); err != nil {
+ return nil, err
+ }
+
+ if len(keepOrder) > 0 && keepOrder[0] {
+ m := make(map[int64]*Issue, len(issues))
+ appended := container.Set[int64]{}
+ for _, issue := range issues {
+ m[issue.ID] = issue
+ }
+ issues = issues[:0]
+ for _, id := range issueIDs {
+ if issue, ok := m[id]; ok && !appended.Contains(id) { // make sure the id is existed and not appended
+ appended.Add(id)
+ issues = append(issues, issue)
+ }
+ }
+ }
+
+ return issues, nil
+}
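+
+// Example (editor's sketch, not part of the original change): passing
+// keepOrder preserves the caller's ordering, e.g. IDs coming back from a
+// search index, assuming ctx and ids ([]int64) are in scope:
+//
+//	issues, err := GetIssuesByIDs(ctx, ids, true)
+//	if err != nil {
+//		return err
+//	}
+//	// issues are now in the same order as ids, with unknown and duplicate IDs skipped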
+
+// GetIssueIDsByRepoID returns all issue ids by repo id
+func GetIssueIDsByRepoID(ctx context.Context, repoID int64) ([]int64, error) {
+ ids := make([]int64, 0, 10)
+ err := db.GetEngine(ctx).Table("issue").Cols("id").Where("repo_id = ?", repoID).Find(&ids)
+ return ids, err
+}
+
+// GetParticipantsIDsByIssueID returns the IDs of all users who participated in comments of an issue,
+// but skips joining with `user` for performance reasons.
+// User permissions must be verified elsewhere if required.
+func GetParticipantsIDsByIssueID(ctx context.Context, issueID int64) ([]int64, error) {
+ userIDs := make([]int64, 0, 5)
+ return userIDs, db.GetEngine(ctx).
+ Table("comment").
+ Cols("poster_id").
+ Where("issue_id = ?", issueID).
+ And("type in (?,?,?)", CommentTypeComment, CommentTypeCode, CommentTypeReview).
+ Distinct("poster_id").
+ Find(&userIDs)
+}
+
+// IsUserParticipantsOfIssue returns true if the user is a participant of the issue
+func IsUserParticipantsOfIssue(ctx context.Context, user *user_model.User, issue *Issue) bool {
+ userIDs, err := issue.GetParticipantIDsByIssue(ctx)
+ if err != nil {
+ log.Error(err.Error())
+ return false
+ }
+ return slices.Contains(userIDs, user.ID)
+}
+
+// DependencyInfo represents high level information about an issue which is a dependency of another issue.
+type DependencyInfo struct {
+ Issue `xorm:"extends"`
+ repo_model.Repository `xorm:"extends"`
+}
+
+// GetParticipantIDsByIssue returns the IDs of all users who participated in comments of an issue, plus the issue author
+func (issue *Issue) GetParticipantIDsByIssue(ctx context.Context) ([]int64, error) {
+ if issue == nil {
+ return nil, nil
+ }
+ userIDs := make([]int64, 0, 5)
+ if err := db.GetEngine(ctx).Table("comment").Cols("poster_id").
+ Where("`comment`.issue_id = ?", issue.ID).
+ And("`comment`.type in (?,?,?)", CommentTypeComment, CommentTypeCode, CommentTypeReview).
+ And("`user`.is_active = ?", true).
+ And("`user`.prohibit_login = ?", false).
+ Join("INNER", "`user`", "`user`.id = `comment`.poster_id").
+ Distinct("poster_id").
+ Find(&userIDs); err != nil {
+ return nil, fmt.Errorf("get poster IDs: %w", err)
+ }
+ if !slices.Contains(userIDs, issue.PosterID) {
+ return append(userIDs, issue.PosterID), nil
+ }
+ return userIDs, nil
+}
+
+// BlockedByDependencies finds all Dependencies an issue is blocked by
+func (issue *Issue) BlockedByDependencies(ctx context.Context, opts db.ListOptions) (issueDeps []*DependencyInfo, err error) {
+ sess := db.GetEngine(ctx).
+ Table("issue").
+ Join("INNER", "repository", "repository.id = issue.repo_id").
+ Join("INNER", "issue_dependency", "issue_dependency.dependency_id = issue.id").
+ Where("issue_id = ?", issue.ID).
+ // sort by repo id then created date, with the issues of the same repo at the beginning of the list
+ OrderBy("CASE WHEN issue.repo_id = ? THEN 0 ELSE issue.repo_id END, issue.created_unix DESC", issue.RepoID)
+ if opts.Page != 0 {
+ sess = db.SetSessionPagination(sess, &opts)
+ }
+ err = sess.Find(&issueDeps)
+
+ for _, depInfo := range issueDeps {
+ depInfo.Issue.Repo = &depInfo.Repository
+ }
+
+ return issueDeps, err
+}
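+
+// exampleListBlockedBy is an illustrative sketch (not part of the original
+// change): fetching the first page of issues that block the given issue.
+// The page size of 20 is an arbitrary example value; db.ListOptions is the
+// existing pagination type used elsewhere in this package.
+func exampleListBlockedBy(ctx context.Context, issue *Issue) ([]*DependencyInfo, error) {
+	// A non-zero Page enables pagination inside BlockedByDependencies.
+	return issue.BlockedByDependencies(ctx, db.ListOptions{Page: 1, PageSize: 20})
+}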
+
+// BlockingDependencies returns all blocking dependencies, aka all other issues a given issue blocks
+func (issue *Issue) BlockingDependencies(ctx context.Context) (issueDeps []*DependencyInfo, err error) {
+ err = db.GetEngine(ctx).
+ Table("issue").
+ Join("INNER", "repository", "repository.id = issue.repo_id").
+ Join("INNER", "issue_dependency", "issue_dependency.issue_id = issue.id").
+ Where("dependency_id = ?", issue.ID).
+ // sort by repo id then created date, with the issues of the same repo at the beginning of the list
+ OrderBy("CASE WHEN issue.repo_id = ? THEN 0 ELSE issue.repo_id END, issue.created_unix DESC", issue.RepoID).
+ Find(&issueDeps)
+
+ for _, depInfo := range issueDeps {
+ depInfo.Issue.Repo = &depInfo.Repository
+ }
+
+ return issueDeps, err
+}
+
+func migratedIssueCond(tp api.GitServiceType) builder.Cond {
+ return builder.In("issue_id",
+ builder.Select("issue.id").
+ From("issue").
+ InnerJoin("repository", "issue.repo_id = repository.id").
+ Where(builder.Eq{
+ "repository.original_service_type": tp,
+ }),
+ )
+}
+
+// RemapExternalUser ExternalUserRemappable interface
+func (issue *Issue) RemapExternalUser(externalName string, externalID, userID int64) error {
+ issue.OriginalAuthor = externalName
+ issue.OriginalAuthorID = externalID
+ issue.PosterID = userID
+ return nil
+}
+
+// GetUserID ExternalUserRemappable interface
+func (issue *Issue) GetUserID() int64 { return issue.PosterID }
+
+// GetExternalName ExternalUserRemappable interface
+func (issue *Issue) GetExternalName() string { return issue.OriginalAuthor }
+
+// GetExternalID ExternalUserRemappable interface
+func (issue *Issue) GetExternalID() int64 { return issue.OriginalAuthorID }
+
+// HasOriginalAuthor returns if an issue was migrated and has an original author.
+func (issue *Issue) HasOriginalAuthor() bool {
+ return issue.OriginalAuthor != "" && issue.OriginalAuthorID != 0
+}
+
+var ErrIssueMaxPinReached = util.NewInvalidArgumentErrorf("the max number of pinned issues has been reached")
+
+// IsPinned returns whether an Issue is pinned
+func (issue *Issue) IsPinned() bool {
+ return issue.PinOrder != 0
+}
+
+// Pin pins an Issue
+func (issue *Issue) Pin(ctx context.Context, user *user_model.User) error {
+ // If the Issue is already pinned, we don't need to pin it twice
+ if issue.IsPinned() {
+ return nil
+ }
+
+ var maxPin int
+ _, err := db.GetEngine(ctx).SQL("SELECT MAX(pin_order) FROM issue WHERE repo_id = ? AND is_pull = ?", issue.RepoID, issue.IsPull).Get(&maxPin)
+ if err != nil {
+ return err
+ }
+
+ // Check if the maximum allowed Pins reached
+ if maxPin >= setting.Repository.Issue.MaxPinned {
+ return ErrIssueMaxPinReached
+ }
+
+ _, err = db.GetEngine(ctx).Table("issue").
+ Where("id = ?", issue.ID).
+ Update(map[string]any{
+ "pin_order": maxPin + 1,
+ })
+ if err != nil {
+ return err
+ }
+
+ // Add the pin event to the history
+ opts := &CreateCommentOptions{
+ Type: CommentTypePin,
+ Doer: user,
+ Repo: issue.Repo,
+ Issue: issue,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// Unpin unpins an Issue
+func (issue *Issue) Unpin(ctx context.Context, user *user_model.User) error {
+ // If the Issue is not pinned, we don't need to unpin it
+ if !issue.IsPinned() {
+ return nil
+ }
+
+	// This sets the Pin for all Issues that come after the unpinned Issue to the correct value
+ _, err := db.GetEngine(ctx).Exec("UPDATE issue SET pin_order = pin_order - 1 WHERE repo_id = ? AND is_pull = ? AND pin_order > ?", issue.RepoID, issue.IsPull, issue.PinOrder)
+ if err != nil {
+ return err
+ }
+
+ _, err = db.GetEngine(ctx).Table("issue").
+ Where("id = ?", issue.ID).
+ Update(map[string]any{
+ "pin_order": 0,
+ })
+ if err != nil {
+ return err
+ }
+
+ // Add the unpin event to the history
+ opts := &CreateCommentOptions{
+ Type: CommentTypeUnpin,
+ Doer: user,
+ Repo: issue.Repo,
+ Issue: issue,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// PinOrUnpin pins or unpins an Issue
+func (issue *Issue) PinOrUnpin(ctx context.Context, user *user_model.User) error {
+ if !issue.IsPinned() {
+ return issue.Pin(ctx, user)
+ }
+
+ return issue.Unpin(ctx, user)
+}
+
+// MovePin moves a Pinned Issue to a new Position
+func (issue *Issue) MovePin(ctx context.Context, newPosition int) error {
+	// If the Issue is not pinned, we can't move it
+ if !issue.IsPinned() {
+ return nil
+ }
+
+ if newPosition < 1 {
+		return fmt.Errorf("the position can't be lower than 1")
+ }
+
+ dbctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ var maxPin int
+ _, err = db.GetEngine(dbctx).SQL("SELECT MAX(pin_order) FROM issue WHERE repo_id = ? AND is_pull = ?", issue.RepoID, issue.IsPull).Get(&maxPin)
+ if err != nil {
+ return err
+ }
+
+	// If the new Position is bigger than the current Maximum, set it to the Maximum
+ if newPosition > maxPin+1 {
+ newPosition = maxPin + 1
+ }
+
+	// Lower the Position of all Pinned Issues that come after the current Position
+ _, err = db.GetEngine(dbctx).Exec("UPDATE issue SET pin_order = pin_order - 1 WHERE repo_id = ? AND is_pull = ? AND pin_order > ?", issue.RepoID, issue.IsPull, issue.PinOrder)
+ if err != nil {
+ return err
+ }
+
+	// Raise the Position of all Pinned Issues that come at or after the new Position
+ _, err = db.GetEngine(dbctx).Exec("UPDATE issue SET pin_order = pin_order + 1 WHERE repo_id = ? AND is_pull = ? AND pin_order >= ?", issue.RepoID, issue.IsPull, newPosition)
+ if err != nil {
+ return err
+ }
+
+ _, err = db.GetEngine(dbctx).Table("issue").
+ Where("id = ?", issue.ID).
+ Update(map[string]any{
+ "pin_order": newPosition,
+ })
+ if err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
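+
+// exampleMovePinToTop is an illustrative sketch (not part of the original
+// change): moving an already pinned Issue to the first position. MovePin
+// rejects positions below 1 and clamps positions beyond the current maximum,
+// so no extra validation is needed here.
+func exampleMovePinToTop(ctx context.Context, issue *Issue) error {
+	return issue.MovePin(ctx, 1)
+}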
+
+// GetPinnedIssues returns the pinned Issues for the given Repo and type
+func GetPinnedIssues(ctx context.Context, repoID int64, isPull bool) (IssueList, error) {
+ issues := make(IssueList, 0)
+
+ err := db.GetEngine(ctx).
+ Table("issue").
+ Where("repo_id = ?", repoID).
+ And("is_pull = ?", isPull).
+ And("pin_order > 0").
+ OrderBy("pin_order").
+ Find(&issues)
+ if err != nil {
+ return nil, err
+ }
+
+ err = issues.LoadAttributes(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ return issues, nil
+}
+
+// IsNewPinAllowed returns whether a new Issue or Pull request can be pinned
+func IsNewPinAllowed(ctx context.Context, repoID int64, isPull bool) (bool, error) {
+ var maxPin int
+ _, err := db.GetEngine(ctx).SQL("SELECT COUNT(pin_order) FROM issue WHERE repo_id = ? AND is_pull = ? AND pin_order > 0", repoID, isPull).Get(&maxPin)
+ if err != nil {
+ return false, err
+ }
+
+ return maxPin < setting.Repository.Issue.MaxPinned, nil
+}
+
+// IsErrIssueMaxPinReached returns whether the error indicates that the user can't pin more Issues
+func IsErrIssueMaxPinReached(err error) bool {
+ return err == ErrIssueMaxPinReached
+}
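+
+// examplePinWithLimitCheck is an illustrative sketch (not part of the original
+// change) combining IsNewPinAllowed, Pin and IsErrIssueMaxPinReached: pin an
+// issue only while the repository is still below the configured pin limit.
+func examplePinWithLimitCheck(ctx context.Context, issue *Issue, doer *user_model.User) error {
+	ok, err := IsNewPinAllowed(ctx, issue.RepoID, issue.IsPull)
+	if err != nil {
+		return err
+	}
+	if !ok {
+		return ErrIssueMaxPinReached
+	}
+	// Pin re-checks the limit itself, so IsErrIssueMaxPinReached can still be
+	// used on the returned error if another pin was added concurrently.
+	return issue.Pin(ctx, doer)
+}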
+
+// InsertIssues inserts issues into the database
+func InsertIssues(ctx context.Context, issues ...*Issue) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ for _, issue := range issues {
+ if err := insertIssue(ctx, issue); err != nil {
+ return err
+ }
+ }
+ return committer.Commit()
+}
+
+func insertIssue(ctx context.Context, issue *Issue) error {
+ sess := db.GetEngine(ctx)
+ if _, err := sess.NoAutoTime().Insert(issue); err != nil {
+ return err
+ }
+ issueLabels := make([]IssueLabel, 0, len(issue.Labels))
+ for _, label := range issue.Labels {
+ issueLabels = append(issueLabels, IssueLabel{
+ IssueID: issue.ID,
+ LabelID: label.ID,
+ })
+ }
+ if len(issueLabels) > 0 {
+ if _, err := sess.Insert(issueLabels); err != nil {
+ return err
+ }
+ }
+
+ for _, reaction := range issue.Reactions {
+ reaction.IssueID = issue.ID
+ }
+
+ if len(issue.Reactions) > 0 {
+ if _, err := sess.Insert(issue.Reactions); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
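+
+// exampleInsertMigratedIssues is an illustrative sketch (not part of the
+// original change): inserting pre-built issues in a single transaction, as a
+// migration would. Labels and reactions attached to each issue are persisted
+// by insertIssue alongside the issue row itself.
+func exampleInsertMigratedIssues(ctx context.Context, issues ...*Issue) error {
+	if err := InsertIssues(ctx, issues...); err != nil {
+		return fmt.Errorf("insert migrated issues: %w", err)
+	}
+	return nil
+}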
diff --git a/models/issues/issue_index.go b/models/issues/issue_index.go
new file mode 100644
index 0000000..9386027
--- /dev/null
+++ b/models/issues/issue_index.go
@@ -0,0 +1,39 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+)
+
+func GetMaxIssueIndexForRepo(ctx context.Context, repoID int64) (int64, error) {
+ var max int64
+ if _, err := db.GetEngine(ctx).Select("MAX(`index`)").Table("issue").Where("repo_id=?", repoID).Get(&max); err != nil {
+ return 0, err
+ }
+ return max, nil
+}
+
+// RecalculateIssueIndexForRepo creates the issue_index for the repo if it does not exist and
+// updates it based on the highest index of the existing issues assigned to that repo
+func RecalculateIssueIndexForRepo(ctx context.Context, repoID int64) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ max, err := GetMaxIssueIndexForRepo(ctx, repoID)
+ if err != nil {
+ return err
+ }
+
+ if err = db.SyncMaxResourceIndex(ctx, "issue_index", repoID, max); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
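+
+// exampleResyncIssueIndex is an illustrative sketch (not part of the original
+// change): after issues have been inserted with explicit indexes (for example
+// by a migration), the per-repo index counter can be resynchronized so the
+// next created issue receives a correct index.
+func exampleResyncIssueIndex(ctx context.Context, repoID int64) (int64, error) {
+	if err := RecalculateIssueIndexForRepo(ctx, repoID); err != nil {
+		return 0, err
+	}
+	// Return the current maximum index, e.g. for logging or sanity checks.
+	return GetMaxIssueIndexForRepo(ctx, repoID)
+}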
diff --git a/models/issues/issue_index_test.go b/models/issues/issue_index_test.go
new file mode 100644
index 0000000..eb79a08
--- /dev/null
+++ b/models/issues/issue_index_test.go
@@ -0,0 +1,39 @@
+// Copyright 2024 The Forgejo Authors
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetMaxIssueIndexForRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ maxPR, err := issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+ require.NoError(t, err)
+
+ issue := testCreateIssue(t, repo.ID, repo.OwnerID, "title1", "content1", false)
+ assert.Greater(t, issue.Index, maxPR)
+
+ maxPR, err = issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+ require.NoError(t, err)
+
+ pull := testCreateIssue(t, repo.ID, repo.OwnerID, "title2", "content2", true)
+ assert.Greater(t, pull.Index, maxPR)
+
+ maxPR, err = issues_model.GetMaxIssueIndexForRepo(db.DefaultContext, repo.ID)
+ require.NoError(t, err)
+
+ assert.Equal(t, maxPR, pull.Index)
+}
diff --git a/models/issues/issue_label.go b/models/issues/issue_label.go
new file mode 100644
index 0000000..10fc821
--- /dev/null
+++ b/models/issues/issue_label.go
@@ -0,0 +1,505 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "sort"
+
+ "code.gitea.io/gitea/models/db"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "xorm.io/builder"
+)
+
+// IssueLabel represents an issue-label relation.
+type IssueLabel struct {
+ ID int64 `xorm:"pk autoincr"`
+ IssueID int64 `xorm:"UNIQUE(s)"`
+ LabelID int64 `xorm:"UNIQUE(s)"`
+}
+
+// HasIssueLabel returns true if issue has been labeled.
+func HasIssueLabel(ctx context.Context, issueID, labelID int64) bool {
+ has, _ := db.GetEngine(ctx).Where("issue_id = ? AND label_id = ?", issueID, labelID).Get(new(IssueLabel))
+ return has
+}
+
+// newIssueLabel creates a new issue-label relation. It does not check whether the label is valid for the issue;
+// the caller MUST verify this before calling this function.
+func newIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) (err error) {
+ if err = db.Insert(ctx, &IssueLabel{
+ IssueID: issue.ID,
+ LabelID: label.ID,
+ }); err != nil {
+ return err
+ }
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeLabel,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ Label: label,
+ Content: "1",
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+
+ issue.Labels = append(issue.Labels, label)
+
+ return updateLabelCols(ctx, label, "num_issues", "num_closed_issue")
+}
+
+// RemoveDuplicateExclusiveIssueLabels removes all other issue labels that share the given label's exclusive scope
+func RemoveDuplicateExclusiveIssueLabels(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) (err error) {
+ scope := label.ExclusiveScope()
+ if scope == "" {
+ return nil
+ }
+
+ var toRemove []*Label
+ for _, issueLabel := range issue.Labels {
+ if label.ID != issueLabel.ID && issueLabel.ExclusiveScope() == scope {
+ toRemove = append(toRemove, issueLabel)
+ }
+ }
+
+ for _, issueLabel := range toRemove {
+ if err = deleteIssueLabel(ctx, issue, issueLabel, doer); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// NewIssueLabel creates a new issue-label relation.
+func NewIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) (err error) {
+ if HasIssueLabel(ctx, issue.ID, label.ID) {
+ return nil
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // Do NOT add invalid labels
+ if issue.RepoID != label.RepoID && issue.Repo.OwnerID != label.OrgID {
+ return nil
+ }
+
+ if err = RemoveDuplicateExclusiveIssueLabels(ctx, issue, label, doer); err != nil {
+		return err
+ }
+
+ if err = newIssueLabel(ctx, issue, label, doer); err != nil {
+ return err
+ }
+
+ issue.isLabelsLoaded = false
+ issue.Labels = nil
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// newIssueLabels adds labels to an issue. It checks whether the labels are valid for the issue
+func newIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ for _, l := range labels {
+ // Don't add already present labels and invalid labels
+ if HasIssueLabel(ctx, issue.ID, l.ID) ||
+ (l.RepoID != issue.RepoID && l.OrgID != issue.Repo.OwnerID) {
+ continue
+ }
+
+ if err = RemoveDuplicateExclusiveIssueLabels(ctx, issue, l, doer); err != nil {
+ return err
+ }
+
+ if err = newIssueLabel(ctx, issue, l, doer); err != nil {
+ return fmt.Errorf("newIssueLabel: %w", err)
+ }
+ }
+
+ return nil
+}
+
+// NewIssueLabels creates a list of issue-label relations.
+func NewIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = newIssueLabels(ctx, issue, labels, doer); err != nil {
+ return err
+ }
+
+ // reload all labels
+ issue.isLabelsLoaded = false
+ issue.Labels = nil
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+func deleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) (err error) {
+ if count, err := db.DeleteByBean(ctx, &IssueLabel{
+ IssueID: issue.ID,
+ LabelID: label.ID,
+ }); err != nil {
+ return err
+ } else if count == 0 {
+ return nil
+ }
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeLabel,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ Label: label,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+
+ return updateLabelCols(ctx, label, "num_issues", "num_closed_issue")
+}
+
+// DeleteIssueLabel deletes issue-label relation.
+func DeleteIssueLabel(ctx context.Context, issue *Issue, label *Label, doer *user_model.User) error {
+ if err := deleteIssueLabel(ctx, issue, label, doer); err != nil {
+ return err
+ }
+
+ issue.Labels = nil
+ return issue.LoadLabels(ctx)
+}
+
+// DeleteLabelsByRepoID deletes all labels of the given repository
+func DeleteLabelsByRepoID(ctx context.Context, repoID int64) error {
+ deleteCond := builder.Select("id").From("label").Where(builder.Eq{"label.repo_id": repoID})
+
+ if _, err := db.GetEngine(ctx).In("label_id", deleteCond).
+ Delete(&IssueLabel{}); err != nil {
+ return err
+ }
+
+ _, err := db.DeleteByBean(ctx, &Label{RepoID: repoID})
+ return err
+}
+
+// CountOrphanedLabels returns the count of labels which are broken and no longer accessible via the UI
+func CountOrphanedLabels(ctx context.Context) (int64, error) {
+ noref, err := db.GetEngine(ctx).Table("label").Where("repo_id=? AND org_id=?", 0, 0).Count()
+ if err != nil {
+ return 0, err
+ }
+
+ norepo, err := db.GetEngine(ctx).Table("label").
+ Where(builder.And(
+ builder.Gt{"repo_id": 0},
+ builder.NotIn("repo_id", builder.Select("id").From("`repository`")),
+ )).
+ Count()
+ if err != nil {
+ return 0, err
+ }
+
+ noorg, err := db.GetEngine(ctx).Table("label").
+ Where(builder.And(
+ builder.Gt{"org_id": 0},
+ builder.NotIn("org_id", builder.Select("id").From("`user`")),
+ )).
+ Count()
+ if err != nil {
+ return 0, err
+ }
+
+ return noref + norepo + noorg, nil
+}
+
+// DeleteOrphanedLabels deletes labels which are broken and no longer accessible via the UI
+func DeleteOrphanedLabels(ctx context.Context) error {
+ // delete labels with no reference
+ if _, err := db.GetEngine(ctx).Table("label").Where("repo_id=? AND org_id=?", 0, 0).Delete(new(Label)); err != nil {
+ return err
+ }
+
+	// delete labels with non-existing repos
+ if _, err := db.GetEngine(ctx).
+ Where(builder.And(
+ builder.Gt{"repo_id": 0},
+ builder.NotIn("repo_id", builder.Select("id").From("`repository`")),
+ )).
+ Delete(Label{}); err != nil {
+ return err
+ }
+
+	// delete labels with non-existing orgs
+ if _, err := db.GetEngine(ctx).
+ Where(builder.And(
+ builder.Gt{"org_id": 0},
+ builder.NotIn("org_id", builder.Select("id").From("`user`")),
+ )).
+ Delete(Label{}); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// CountOrphanedIssueLabels returns the count of IssueLabels which no longer have a label behind them
+func CountOrphanedIssueLabels(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).Table("issue_label").
+ NotIn("label_id", builder.Select("id").From("label")).
+ Count()
+}
+
+// DeleteOrphanedIssueLabels deletes IssueLabels which no longer have a label behind them
+func DeleteOrphanedIssueLabels(ctx context.Context) error {
+ _, err := db.GetEngine(ctx).
+ NotIn("label_id", builder.Select("id").From("label")).
+ Delete(IssueLabel{})
+ return err
+}
+
+// CountIssueLabelWithOutsideLabels counts issue-label relations that reference a label from outside the issue's repository or organization
+func CountIssueLabelWithOutsideLabels(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).Where(builder.Expr("(label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id)")).
+ Table("issue_label").
+ Join("inner", "label", "issue_label.label_id = label.id ").
+ Join("inner", "issue", "issue.id = issue_label.issue_id ").
+ Join("inner", "repository", "issue.repo_id = repository.id").
+ Count(new(IssueLabel))
+}
+
+// FixIssueLabelWithOutsideLabels removes issue-label relations that reference a label from outside the issue's repository or organization
+func FixIssueLabelWithOutsideLabels(ctx context.Context) (int64, error) {
+ res, err := db.GetEngine(ctx).Exec(`DELETE FROM issue_label WHERE issue_label.id IN (
+ SELECT il_too.id FROM (
+ SELECT il_too_too.id
+ FROM issue_label AS il_too_too
+ INNER JOIN label ON il_too_too.label_id = label.id
+ INNER JOIN issue on issue.id = il_too_too.issue_id
+ INNER JOIN repository on repository.id = issue.repo_id
+ WHERE
+ (label.org_id = 0 AND issue.repo_id != label.repo_id) OR (label.repo_id = 0 AND label.org_id != repository.owner_id)
+ ) AS il_too )`)
+ if err != nil {
+ return 0, err
+ }
+
+ return res.RowsAffected()
+}
+
+// LoadLabels loads labels
+func (issue *Issue) LoadLabels(ctx context.Context) (err error) {
+ if !issue.isLabelsLoaded && issue.Labels == nil && issue.ID != 0 {
+ issue.Labels, err = GetLabelsByIssueID(ctx, issue.ID)
+ if err != nil {
+ return fmt.Errorf("getLabelsByIssueID [%d]: %w", issue.ID, err)
+ }
+ issue.isLabelsLoaded = true
+ }
+ return nil
+}
+
+// GetLabelsByIssueID returns all labels that belong to given issue by ID.
+func GetLabelsByIssueID(ctx context.Context, issueID int64) ([]*Label, error) {
+ var labels []*Label
+ return labels, db.GetEngine(ctx).Where("issue_label.issue_id = ?", issueID).
+ Join("LEFT", "issue_label", "issue_label.label_id = label.id").
+ Asc("label.name").
+ Find(&labels)
+}
+
+func clearIssueLabels(ctx context.Context, issue *Issue, doer *user_model.User) (err error) {
+ if err = issue.LoadLabels(ctx); err != nil {
+ return fmt.Errorf("getLabels: %w", err)
+ }
+
+ for i := range issue.Labels {
+ if err = deleteIssueLabel(ctx, issue, issue.Labels[i], doer); err != nil {
+ return fmt.Errorf("removeLabel: %w", err)
+ }
+ }
+
+ return nil
+}
+
+// ClearIssueLabels removes all issue labels as the given user.
+// Triggers appropriate WebHooks, if any.
+func ClearIssueLabels(ctx context.Context, issue *Issue, doer *user_model.User) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ } else if err = issue.LoadPullRequest(ctx); err != nil {
+ return err
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanWriteIssuesOrPulls(issue.IsPull) {
+ return ErrRepoLabelNotExist{}
+ }
+
+ if err = clearIssueLabels(ctx, issue, doer); err != nil {
+ return err
+ }
+
+ if err = committer.Commit(); err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ return nil
+}
+
+type labelSorter []*Label
+
+func (ts labelSorter) Len() int {
+ return len([]*Label(ts))
+}
+
+func (ts labelSorter) Less(i, j int) bool {
+ return []*Label(ts)[i].ID < []*Label(ts)[j].ID
+}
+
+func (ts labelSorter) Swap(i, j int) {
+ []*Label(ts)[i], []*Label(ts)[j] = []*Label(ts)[j], []*Label(ts)[i]
+}
+
+// RemoveDuplicateExclusiveLabels ensures only one label of a given exclusive scope exists, with labels at the end of the
+// array getting preference over earlier ones.
+func RemoveDuplicateExclusiveLabels(labels []*Label) []*Label {
+ validLabels := make([]*Label, 0, len(labels))
+
+ for i, label := range labels {
+ scope := label.ExclusiveScope()
+ if scope != "" {
+ foundOther := false
+ for _, otherLabel := range labels[i+1:] {
+ if otherLabel.ExclusiveScope() == scope {
+ foundOther = true
+ break
+ }
+ }
+ if foundOther {
+ continue
+ }
+ }
+ validLabels = append(validLabels, label)
+ }
+
+ return validLabels
+}
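+
+// exampleDeduplicateExclusive is an illustrative sketch (not part of the
+// original change): if both labels share an exclusive scope such as "kind/",
+// only the later entry survives, since later labels take preference.
+func exampleDeduplicateExclusive(bug, feature *Label) []*Label {
+	// Assuming "kind/bug" and "kind/feature" both carry the exclusive scope
+	// "kind", the result contains only `feature`.
+	return RemoveDuplicateExclusiveLabels([]*Label{bug, feature})
+}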
+
+// ReplaceIssueLabels removes all current labels and adds the given labels to the issue.
+// Triggers appropriate WebHooks, if any.
+func ReplaceIssueLabels(ctx context.Context, issue *Issue, labels []*Label, doer *user_model.User) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ labels = RemoveDuplicateExclusiveLabels(labels)
+
+ sort.Sort(labelSorter(labels))
+ sort.Sort(labelSorter(issue.Labels))
+
+ var toAdd, toRemove []*Label
+
+ addIndex, removeIndex := 0, 0
+ for addIndex < len(labels) && removeIndex < len(issue.Labels) {
+ addLabel := labels[addIndex]
+ removeLabel := issue.Labels[removeIndex]
+ if addLabel.ID == removeLabel.ID {
+ // Silently drop invalid labels
+ if removeLabel.RepoID != issue.RepoID && removeLabel.OrgID != issue.Repo.OwnerID {
+ toRemove = append(toRemove, removeLabel)
+ }
+
+ addIndex++
+ removeIndex++
+ } else if addLabel.ID < removeLabel.ID {
+ // Only add if the label is valid
+ if addLabel.RepoID == issue.RepoID || addLabel.OrgID == issue.Repo.OwnerID {
+ toAdd = append(toAdd, addLabel)
+ }
+ addIndex++
+ } else {
+ toRemove = append(toRemove, removeLabel)
+ removeIndex++
+ }
+ }
+ toAdd = append(toAdd, labels[addIndex:]...)
+ toRemove = append(toRemove, issue.Labels[removeIndex:]...)
+
+ if len(toAdd) > 0 {
+ if err = newIssueLabels(ctx, issue, toAdd, doer); err != nil {
+ return fmt.Errorf("addLabels: %w", err)
+ }
+ }
+
+ for _, l := range toRemove {
+ if err = deleteIssueLabel(ctx, issue, l, doer); err != nil {
+ return fmt.Errorf("removeLabel: %w", err)
+ }
+ }
+
+ issue.Labels = nil
+ if err = issue.LoadLabels(ctx); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
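+
+// exampleSetLabels is an illustrative sketch (not part of the original
+// change): ReplaceIssueLabels computes the minimal add/remove diff against
+// the issue's current labels, records a label comment for each change and
+// silently drops labels that do not belong to the issue's repo or org.
+func exampleSetLabels(ctx context.Context, issue *Issue, desired []*Label, doer *user_model.User) error {
+	return ReplaceIssueLabels(ctx, issue, desired, doer)
+}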
diff --git a/models/issues/issue_label_test.go b/models/issues/issue_label_test.go
new file mode 100644
index 0000000..b6b39d6
--- /dev/null
+++ b/models/issues/issue_label_test.go
@@ -0,0 +1,30 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewIssueLabelsScope(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 18})
+ label1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 7})
+ label2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 8})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ require.NoError(t, issues_model.NewIssueLabels(db.DefaultContext, issue, []*issues_model.Label{label1, label2}, doer))
+
+ assert.Len(t, issue.Labels, 1)
+ assert.Equal(t, label2.ID, issue.Labels[0].ID)
+}
diff --git a/models/issues/issue_list.go b/models/issues/issue_list.go
new file mode 100644
index 0000000..fe6c630
--- /dev/null
+++ b/models/issues/issue_list.go
@@ -0,0 +1,622 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+
+ "xorm.io/builder"
+)
+
+// IssueList defines a list of issues
+type IssueList []*Issue
+
+// get the repo IDs to be loaded later; these IDs are for issue.Repo and issue.PullRequest.HeadRepo
+func (issues IssueList) getRepoIDs() []int64 {
+ repoIDs := make(container.Set[int64], len(issues))
+ for _, issue := range issues {
+ if issue.Repo == nil {
+ repoIDs.Add(issue.RepoID)
+ }
+ if issue.PullRequest != nil && issue.PullRequest.HeadRepo == nil {
+ repoIDs.Add(issue.PullRequest.HeadRepoID)
+ }
+ }
+ return repoIDs.Values()
+}
+
+// LoadRepositories loads all repositories of the issues
+func (issues IssueList) LoadRepositories(ctx context.Context) (repo_model.RepositoryList, error) {
+ if len(issues) == 0 {
+ return nil, nil
+ }
+
+ repoIDs := issues.getRepoIDs()
+ repoMaps := make(map[int64]*repo_model.Repository, len(repoIDs))
+ left := len(repoIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ err := db.GetEngine(ctx).
+ In("id", repoIDs[:limit]).
+ Find(&repoMaps)
+ if err != nil {
+ return nil, fmt.Errorf("find repository: %w", err)
+ }
+ left -= limit
+ repoIDs = repoIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ if issue.Repo == nil {
+ issue.Repo = repoMaps[issue.RepoID]
+ } else {
+ repoMaps[issue.RepoID] = issue.Repo
+ }
+ if issue.PullRequest != nil {
+ issue.PullRequest.BaseRepo = issue.Repo
+ if issue.PullRequest.HeadRepo == nil {
+ issue.PullRequest.HeadRepo = repoMaps[issue.PullRequest.HeadRepoID]
+ }
+ }
+ }
+ return repo_model.ValuesRepository(repoMaps), nil
+}
+
+func (issues IssueList) LoadPosters(ctx context.Context) error {
+ if len(issues) == 0 {
+ return nil
+ }
+
+ posterIDs := container.FilterSlice(issues, func(issue *Issue) (int64, bool) {
+ return issue.PosterID, issue.Poster == nil && user_model.IsValidUserID(issue.PosterID)
+ })
+
+ posterMaps, err := getPostersByIDs(ctx, posterIDs)
+ if err != nil {
+ return err
+ }
+
+ for _, issue := range issues {
+ if issue.Poster == nil {
+ issue.PosterID, issue.Poster = user_model.GetUserFromMap(issue.PosterID, posterMaps)
+ }
+ }
+ return nil
+}
+
+func getPostersByIDs(ctx context.Context, posterIDs []int64) (map[int64]*user_model.User, error) {
+ posterMaps := make(map[int64]*user_model.User, len(posterIDs))
+ left := len(posterIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ err := db.GetEngine(ctx).
+ In("id", posterIDs[:limit]).
+ Find(&posterMaps)
+ if err != nil {
+ return nil, err
+ }
+ left -= limit
+ posterIDs = posterIDs[limit:]
+ }
+ return posterMaps, nil
+}
+
+func (issues IssueList) getIssueIDs() []int64 {
+ ids := make([]int64, 0, len(issues))
+ for _, issue := range issues {
+ ids = append(ids, issue.ID)
+ }
+ return ids
+}
+
+func (issues IssueList) LoadLabels(ctx context.Context) error {
+ if len(issues) == 0 {
+ return nil
+ }
+
+ type LabelIssue struct {
+ Label *Label `xorm:"extends"`
+ IssueLabel *IssueLabel `xorm:"extends"`
+ }
+
+ issueLabels := make(map[int64][]*Label, len(issues)*3)
+ issueIDs := issues.getIssueIDs()
+ left := len(issueIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).Table("label").
+ Join("LEFT", "issue_label", "issue_label.label_id = label.id").
+ In("issue_label.issue_id", issueIDs[:limit]).
+ Asc("label.name").
+ Rows(new(LabelIssue))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var labelIssue LabelIssue
+ err = rows.Scan(&labelIssue)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.LoadLabels: Close: %w", err1)
+ }
+ return err
+ }
+ issueLabels[labelIssue.IssueLabel.IssueID] = append(issueLabels[labelIssue.IssueLabel.IssueID], labelIssue.Label)
+ }
+		// Close the rows for this batch before moving on; a close error is
+		// unexpected, so it is returned to the caller.
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.LoadLabels: Close: %w", err1)
+ }
+ left -= limit
+ issueIDs = issueIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Labels = issueLabels[issue.ID]
+ issue.isLabelsLoaded = true
+ }
+ return nil
+}
+
+func (issues IssueList) getMilestoneIDs() []int64 {
+ return container.FilterSlice(issues, func(issue *Issue) (int64, bool) {
+ return issue.MilestoneID, true
+ })
+}
+
+func (issues IssueList) LoadMilestones(ctx context.Context) error {
+ milestoneIDs := issues.getMilestoneIDs()
+ if len(milestoneIDs) == 0 {
+ return nil
+ }
+
+ milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+ left := len(milestoneIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ err := db.GetEngine(ctx).
+ In("id", milestoneIDs[:limit]).
+ Find(&milestoneMaps)
+ if err != nil {
+ return err
+ }
+ left -= limit
+ milestoneIDs = milestoneIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Milestone = milestoneMaps[issue.MilestoneID]
+ issue.isMilestoneLoaded = true
+ }
+ return nil
+}
+
+func (issues IssueList) LoadProjects(ctx context.Context) error {
+ issueIDs := issues.getIssueIDs()
+ projectMaps := make(map[int64]*project_model.Project, len(issues))
+ left := len(issueIDs)
+
+ type projectWithIssueID struct {
+ *project_model.Project `xorm:"extends"`
+ IssueID int64
+ }
+
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+
+ projects := make([]*projectWithIssueID, 0, limit)
+ err := db.GetEngine(ctx).
+ Table("project").
+ Select("project.*, project_issue.issue_id").
+ Join("INNER", "project_issue", "project.id = project_issue.project_id").
+ In("project_issue.issue_id", issueIDs[:limit]).
+ Find(&projects)
+ if err != nil {
+ return err
+ }
+ for _, project := range projects {
+ projectMaps[project.IssueID] = project.Project
+ }
+ left -= limit
+ issueIDs = issueIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Project = projectMaps[issue.ID]
+ }
+ return nil
+}
+
+func (issues IssueList) LoadAssignees(ctx context.Context) error {
+ if len(issues) == 0 {
+ return nil
+ }
+
+ type AssigneeIssue struct {
+ IssueAssignee *IssueAssignees `xorm:"extends"`
+ Assignee *user_model.User `xorm:"extends"`
+ }
+
+ assignees := make(map[int64][]*user_model.User, len(issues))
+ issueIDs := issues.getIssueIDs()
+ left := len(issueIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).Table("issue_assignees").
+ Join("INNER", "`user`", "`user`.id = `issue_assignees`.assignee_id").
+ In("`issue_assignees`.issue_id", issueIDs[:limit]).OrderBy(user_model.GetOrderByName()).
+ Rows(new(AssigneeIssue))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var assigneeIssue AssigneeIssue
+ err = rows.Scan(&assigneeIssue)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadAssignees: Close: %w", err1)
+ }
+ return err
+ }
+
+ assignees[assigneeIssue.IssueAssignee.IssueID] = append(assignees[assigneeIssue.IssueAssignee.IssueID], assigneeIssue.Assignee)
+ }
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadAssignees: Close: %w", err1)
+ }
+ left -= limit
+ issueIDs = issueIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Assignees = assignees[issue.ID]
+ if len(issue.Assignees) > 0 {
+ issue.Assignee = issue.Assignees[0]
+ }
+ issue.isAssigneeLoaded = true
+ }
+ return nil
+}
+
+func (issues IssueList) getPullIssueIDs() []int64 {
+ ids := make([]int64, 0, len(issues))
+ for _, issue := range issues {
+ if issue.IsPull && issue.PullRequest == nil {
+ ids = append(ids, issue.ID)
+ }
+ }
+ return ids
+}
+
+// LoadPullRequests loads pull requests
+func (issues IssueList) LoadPullRequests(ctx context.Context) error {
+ issuesIDs := issues.getPullIssueIDs()
+ if len(issuesIDs) == 0 {
+ return nil
+ }
+
+ pullRequestMaps := make(map[int64]*PullRequest, len(issuesIDs))
+ left := len(issuesIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("issue_id", issuesIDs[:limit]).
+ Rows(new(PullRequest))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var pr PullRequest
+ err = rows.Scan(&pr)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadPullRequests: Close: %w", err1)
+ }
+ return err
+ }
+ pullRequestMaps[pr.IssueID] = &pr
+ }
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadPullRequests: Close: %w", err1)
+ }
+ left -= limit
+ issuesIDs = issuesIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.PullRequest = pullRequestMaps[issue.ID]
+ if issue.PullRequest != nil {
+ issue.PullRequest.Issue = issue
+ }
+ }
+ return nil
+}
+
+// LoadAttachments loads attachments
+func (issues IssueList) LoadAttachments(ctx context.Context) (err error) {
+ if len(issues) == 0 {
+ return nil
+ }
+
+ attachments := make(map[int64][]*repo_model.Attachment, len(issues))
+ issuesIDs := issues.getIssueIDs()
+ left := len(issuesIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).
+ In("issue_id", issuesIDs[:limit]).
+ Rows(new(repo_model.Attachment))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var attachment repo_model.Attachment
+ err = rows.Scan(&attachment)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadAttachments: Close: %w", err1)
+ }
+ return err
+ }
+ attachments[attachment.IssueID] = append(attachments[attachment.IssueID], &attachment)
+ }
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadAttachments: Close: %w", err1)
+ }
+ left -= limit
+ issuesIDs = issuesIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Attachments = attachments[issue.ID]
+ issue.isAttachmentsLoaded = true
+ }
+ return nil
+}
+
+func (issues IssueList) loadComments(ctx context.Context, cond builder.Cond) (err error) {
+ if len(issues) == 0 {
+ return nil
+ }
+
+ comments := make(map[int64][]*Comment, len(issues))
+ issuesIDs := issues.getIssueIDs()
+ left := len(issuesIDs)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+ rows, err := db.GetEngine(ctx).Table("comment").
+ Join("INNER", "issue", "issue.id = comment.issue_id").
+ In("issue.id", issuesIDs[:limit]).
+ Where(cond).
+ Rows(new(Comment))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var comment Comment
+ err = rows.Scan(&comment)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadComments: Close: %w", err1)
+ }
+ return err
+ }
+ comments[comment.IssueID] = append(comments[comment.IssueID], &comment)
+ }
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadComments: Close: %w", err1)
+ }
+ left -= limit
+ issuesIDs = issuesIDs[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.Comments = comments[issue.ID]
+ }
+ return nil
+}
+
+func (issues IssueList) loadTotalTrackedTimes(ctx context.Context) (err error) {
+ type totalTimesByIssue struct {
+ IssueID int64
+ Time int64
+ }
+ if len(issues) == 0 {
+ return nil
+ }
+ trackedTimes := make(map[int64]int64, len(issues))
+
+ reposMap := make(map[int64]*repo_model.Repository, len(issues))
+ for _, issue := range issues {
+ reposMap[issue.RepoID] = issue.Repo
+ }
+ repos := repo_model.RepositoryListOfMap(reposMap)
+
+ if err := repos.LoadUnits(ctx); err != nil {
+ return err
+ }
+
+ ids := make([]int64, 0, len(issues))
+ for _, issue := range issues {
+ if issue.Repo.IsTimetrackerEnabled(ctx) {
+ ids = append(ids, issue.ID)
+ }
+ }
+
+ left := len(ids)
+ for left > 0 {
+ limit := db.DefaultMaxInSize
+ if left < limit {
+ limit = left
+ }
+
+ // select issue_id, sum(time) from tracked_time where issue_id in (<issue ids in current page>) group by issue_id
+ rows, err := db.GetEngine(ctx).Table("tracked_time").
+ Where("deleted = ?", false).
+ Select("issue_id, sum(time) as time").
+ In("issue_id", ids[:limit]).
+ GroupBy("issue_id").
+ Rows(new(totalTimesByIssue))
+ if err != nil {
+ return err
+ }
+
+ for rows.Next() {
+ var totalTime totalTimesByIssue
+ err = rows.Scan(&totalTime)
+ if err != nil {
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadTotalTrackedTimes: Close: %w", err1)
+ }
+ return err
+ }
+ trackedTimes[totalTime.IssueID] = totalTime.Time
+ }
+ if err1 := rows.Close(); err1 != nil {
+ return fmt.Errorf("IssueList.loadTotalTrackedTimes: Close: %w", err1)
+ }
+ left -= limit
+ ids = ids[limit:]
+ }
+
+ for _, issue := range issues {
+ issue.TotalTrackedTime = trackedTimes[issue.ID]
+ }
+ return nil
+}
+
+// LoadAttributes loads all attributes, except for attachments and comments
+func (issues IssueList) LoadAttributes(ctx context.Context) error {
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: LoadRepositories: %w", err)
+ }
+
+ if err := issues.LoadPosters(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: LoadPosters: %w", err)
+ }
+
+ if err := issues.LoadLabels(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: LoadLabels: %w", err)
+ }
+
+ if err := issues.LoadMilestones(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: LoadMilestones: %w", err)
+ }
+
+ if err := issues.LoadProjects(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: loadProjects: %w", err)
+ }
+
+ if err := issues.LoadAssignees(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: loadAssignees: %w", err)
+ }
+
+ if err := issues.LoadPullRequests(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: loadPullRequests: %w", err)
+ }
+
+ if err := issues.loadTotalTrackedTimes(ctx); err != nil {
+ return fmt.Errorf("issue.loadAttributes: loadTotalTrackedTimes: %w", err)
+ }
+
+ return nil
+}
+
+// LoadComments loads comments
+func (issues IssueList) LoadComments(ctx context.Context) error {
+ return issues.loadComments(ctx, builder.NewCond())
+}
+
+// LoadDiscussComments loads discuss comments
+func (issues IssueList) LoadDiscussComments(ctx context.Context) error {
+ return issues.loadComments(ctx, builder.Eq{"comment.type": CommentTypeComment})
+}
+
+// GetApprovalCounts returns a map of issue ID to slice of approval counts
+// FIXME: only returns official counts due to double counting of non-official approvals
+func (issues IssueList) GetApprovalCounts(ctx context.Context) (map[int64][]*ReviewCount, error) {
+ rCounts := make([]*ReviewCount, 0, 2*len(issues))
+ ids := make([]int64, len(issues))
+ for i, issue := range issues {
+ ids[i] = issue.ID
+ }
+ sess := db.GetEngine(ctx).In("issue_id", ids)
+ err := sess.Select("issue_id, type, count(id) as `count`").
+ Where("official = ? AND dismissed = ?", true, false).
+ GroupBy("issue_id, type").
+ OrderBy("issue_id").
+ Table("review").
+ Find(&rCounts)
+ if err != nil {
+ return nil, err
+ }
+
+ approvalCountMap := make(map[int64][]*ReviewCount, len(issues))
+
+ for _, c := range rCounts {
+ approvalCountMap[c.IssueID] = append(approvalCountMap[c.IssueID], c)
+ }
+
+ return approvalCountMap, nil
+}
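+
+// exampleCountApprovals is an illustrative sketch (not part of the original
+// change): summing official approvals per issue from the ReviewCount slices
+// returned by GetApprovalCounts. It assumes the ReviewCount fields (IssueID,
+// Type, Count) and the ReviewTypeApprove constant defined in this package's
+// review code.
+func exampleCountApprovals(ctx context.Context, issues IssueList) (map[int64]int64, error) {
+	counts, err := issues.GetApprovalCounts(ctx)
+	if err != nil {
+		return nil, err
+	}
+	approvals := make(map[int64]int64, len(counts))
+	for issueID, rcs := range counts {
+		for _, rc := range rcs {
+			if rc.Type == ReviewTypeApprove {
+				approvals[issueID] += rc.Count
+			}
+		}
+	}
+	return approvals, nil
+}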
+
+func (issues IssueList) LoadIsRead(ctx context.Context, userID int64) error {
+ issueIDs := issues.getIssueIDs()
+ issueUsers := make([]*IssueUser, 0, len(issueIDs))
+ if err := db.GetEngine(ctx).Where("uid =?", userID).
+ In("issue_id", issueIDs).
+ Find(&issueUsers); err != nil {
+ return err
+ }
+
+ for _, issueUser := range issueUsers {
+ for _, issue := range issues {
+ if issue.ID == issueUser.IssueID {
+ issue.IsRead = issueUser.IsRead
+ }
+ }
+ }
+
+ return nil
+}
diff --git a/models/issues/issue_list_test.go b/models/issues/issue_list_test.go
new file mode 100644
index 0000000..32cc0fe
--- /dev/null
+++ b/models/issues/issue_list_test.go
@@ -0,0 +1,129 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestIssueList_LoadRepositories(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issueList := issues_model.IssueList{
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}),
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2}),
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 4}),
+ }
+
+ repos, err := issueList.LoadRepositories(db.DefaultContext)
+ require.NoError(t, err)
+ assert.Len(t, repos, 2)
+ for _, issue := range issueList {
+ assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ }
+}
+
+func TestIssueList_LoadAttributes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ setting.Service.EnableTimetracking = true
+ issueList := issues_model.IssueList{
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}),
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 4}),
+ }
+
+ require.NoError(t, issueList.LoadAttributes(db.DefaultContext))
+ for _, issue := range issueList {
+ assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ for _, label := range issue.Labels {
+ assert.EqualValues(t, issue.RepoID, label.RepoID)
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID})
+ }
+ if issue.PosterID > 0 {
+ assert.EqualValues(t, issue.PosterID, issue.Poster.ID)
+ }
+ if issue.AssigneeID > 0 {
+ assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID)
+ }
+ if issue.MilestoneID > 0 {
+ assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID)
+ }
+ if issue.IsPull {
+ assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID)
+ }
+ for _, attachment := range issue.Attachments {
+ assert.EqualValues(t, issue.ID, attachment.IssueID)
+ }
+ for _, comment := range issue.Comments {
+ assert.EqualValues(t, issue.ID, comment.IssueID)
+ }
+ if issue.ID == int64(1) {
+ assert.Equal(t, int64(400), issue.TotalTrackedTime)
+ assert.NotNil(t, issue.Project)
+ assert.Equal(t, int64(1), issue.Project.ID)
+ } else {
+ assert.Nil(t, issue.Project)
+ }
+ }
+
+ require.NoError(t, issueList.LoadIsRead(db.DefaultContext, 1))
+ for _, issue := range issueList {
+ assert.Equal(t, issue.ID == 1, issue.IsRead, "unexpected is_read value for issue[%d]", issue.ID)
+ }
+}
+
+func TestIssueListLoadUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ for _, testCase := range []struct {
+ poster int64
+ user *user_model.User
+ }{
+ {
+ poster: user_model.ActionsUserID,
+ user: user_model.NewActionsUser(),
+ },
+ {
+ poster: user_model.GhostUserID,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: doer.ID,
+ user: doer,
+ },
+ {
+ poster: 0,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: -200,
+ user: user_model.NewGhostUser(),
+ },
+ {
+ poster: 200,
+ user: user_model.NewGhostUser(),
+ },
+ } {
+ t.Run(testCase.user.Name, func(t *testing.T) {
+ list := issues_model.IssueList{issue}
+
+ issue.PosterID = testCase.poster
+ issue.Poster = nil
+ require.NoError(t, list.LoadPosters(db.DefaultContext))
+ require.NotNil(t, issue.Poster)
+ assert.Equal(t, testCase.user.ID, issue.Poster.ID)
+ })
+ }
+}
diff --git a/models/issues/issue_lock.go b/models/issues/issue_lock.go
new file mode 100644
index 0000000..b21629b
--- /dev/null
+++ b/models/issues/issue_lock.go
@@ -0,0 +1,66 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// IssueLockOptions defines options for locking and/or unlocking an issue/PR
+type IssueLockOptions struct {
+ Doer *user_model.User
+ Issue *Issue
+ Reason string
+}
+
+// LockIssue locks an issue. This would limit commenting abilities to
+// users with write access to the repo
+func LockIssue(ctx context.Context, opts *IssueLockOptions) error {
+ return updateIssueLock(ctx, opts, true)
+}
+
+// UnlockIssue unlocks a previously locked issue.
+func UnlockIssue(ctx context.Context, opts *IssueLockOptions) error {
+ return updateIssueLock(ctx, opts, false)
+}
+
+func updateIssueLock(ctx context.Context, opts *IssueLockOptions, lock bool) error {
+ if opts.Issue.IsLocked == lock {
+ return nil
+ }
+
+ opts.Issue.IsLocked = lock
+ var commentType CommentType
+ if opts.Issue.IsLocked {
+ commentType = CommentTypeLock
+ } else {
+ commentType = CommentTypeUnlock
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := UpdateIssueCols(ctx, opts.Issue, "is_locked"); err != nil {
+ return err
+ }
+
+ opt := &CreateCommentOptions{
+ Doer: opts.Doer,
+ Issue: opts.Issue,
+ Repo: opts.Issue.Repo,
+ Type: commentType,
+ Content: opts.Reason,
+ }
+ if _, err := CreateComment(ctx, opt); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
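+
+// exampleLockResolvedIssue is an illustrative sketch (not part of the
+// original change): locking an issue with a reason and unlocking it again.
+// Both calls are no-ops when the issue is already in the requested state;
+// issue.Repo is assumed to be loaded, since it is used for the lock comment.
+func exampleLockResolvedIssue(ctx context.Context, issue *Issue, doer *user_model.User) error {
+	if err := LockIssue(ctx, &IssueLockOptions{Doer: doer, Issue: issue, Reason: "resolved"}); err != nil {
+		return err
+	}
+	return UnlockIssue(ctx, &IssueLockOptions{Doer: doer, Issue: issue})
+}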
diff --git a/models/issues/issue_project.go b/models/issues/issue_project.go
new file mode 100644
index 0000000..835ea1d
--- /dev/null
+++ b/models/issues/issue_project.go
@@ -0,0 +1,162 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ project_model "code.gitea.io/gitea/models/project"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// LoadProject loads the project the issue was assigned to
+func (issue *Issue) LoadProject(ctx context.Context) (err error) {
+ if issue.Project == nil {
+ var p project_model.Project
+ has, err := db.GetEngine(ctx).Table("project").
+ Join("INNER", "project_issue", "project.id=project_issue.project_id").
+ Where("project_issue.issue_id = ?", issue.ID).Get(&p)
+ if err != nil {
+ return err
+ } else if has {
+ issue.Project = &p
+ }
+ }
+ return err
+}
+
+func (issue *Issue) projectID(ctx context.Context) int64 {
+ var ip project_model.ProjectIssue
+ has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip)
+ if err != nil || !has {
+ return 0
+ }
+ return ip.ProjectID
+}
+
+// ProjectColumnID returns the project column ID if the issue was assigned to one
+func (issue *Issue) ProjectColumnID(ctx context.Context) int64 {
+ var ip project_model.ProjectIssue
+ has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip)
+ if err != nil || !has {
+ return 0
+ }
+ return ip.ProjectColumnID
+}
+
+// LoadIssuesFromColumn loads the issues assigned to the given column
+func LoadIssuesFromColumn(ctx context.Context, b *project_model.Column) (IssueList, error) {
+ issueList, err := Issues(ctx, &IssuesOptions{
+ ProjectColumnID: b.ID,
+ ProjectID: b.ProjectID,
+ SortType: "project-column-sorting",
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ if b.Default {
+ issues, err := Issues(ctx, &IssuesOptions{
+ ProjectColumnID: db.NoConditionID,
+ ProjectID: b.ProjectID,
+ SortType: "project-column-sorting",
+ })
+ if err != nil {
+ return nil, err
+ }
+ issueList = append(issueList, issues...)
+ }
+
+ if err := issueList.LoadComments(ctx); err != nil {
+ return nil, err
+ }
+
+ return issueList, nil
+}
+
+// LoadIssuesFromColumnList loads the issues assigned to the given columns
+func LoadIssuesFromColumnList(ctx context.Context, bs project_model.ColumnList) (map[int64]IssueList, error) {
+ issuesMap := make(map[int64]IssueList, len(bs))
+ for i := range bs {
+ il, err := LoadIssuesFromColumn(ctx, bs[i])
+ if err != nil {
+ return nil, err
+ }
+ issuesMap[bs[i].ID] = il
+ }
+ return issuesMap, nil
+}
+
+// IssueAssignOrRemoveProject changes the project associated with an issue.
+// If newProjectID is 0, the issue is removed from the project.
+func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID, newColumnID int64) error {
+ return db.WithTx(ctx, func(ctx context.Context) error {
+ oldProjectID := issue.projectID(ctx)
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // Only check if we add a new project and not remove it.
+ if newProjectID > 0 {
+ newProject, err := project_model.GetProjectByID(ctx, newProjectID)
+ if err != nil {
+ return err
+ }
+ if !newProject.CanBeAccessedByOwnerRepo(issue.Repo.OwnerID, issue.Repo) {
+ return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID)
+ }
+ if newColumnID == 0 {
+ newDefaultColumn, err := newProject.GetDefaultColumn(ctx)
+ if err != nil {
+ return err
+ }
+ newColumnID = newDefaultColumn.ID
+ }
+ }
+
+ if _, err := db.GetEngine(ctx).Where("project_issue.issue_id=?", issue.ID).Delete(&project_model.ProjectIssue{}); err != nil {
+ return err
+ }
+
+ if oldProjectID > 0 || newProjectID > 0 {
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeProject,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldProjectID: oldProjectID,
+ ProjectID: newProjectID,
+ }); err != nil {
+ return err
+ }
+ }
+ if newProjectID == 0 {
+ return nil
+ }
+ if newColumnID == 0 {
+ panic("newColumnID must not be zero") // shouldn't happen
+ }
+
+ res := struct {
+ MaxSorting int64
+ IssueCount int64
+ }{}
+ if _, err := db.GetEngine(ctx).Select("max(sorting) as max_sorting, count(*) as issue_count").Table("project_issue").
+ Where("project_id=?", newProjectID).
+ And("project_board_id=?", newColumnID).
+ Get(&res); err != nil {
+ return err
+ }
+ newSorting := util.Iif(res.IssueCount > 0, res.MaxSorting+1, 0)
+ return db.Insert(ctx, &project_model.ProjectIssue{
+ IssueID: issue.ID,
+ ProjectID: newProjectID,
+ ProjectColumnID: newColumnID,
+ Sorting: newSorting,
+ })
+ })
+}
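+
+// exampleMoveIssueToProject is an illustrative sketch (not part of the
+// original change): assigning an issue to a project while letting the
+// function pick the project's default column.
+func exampleMoveIssueToProject(ctx context.Context, issue *Issue, doer *user_model.User, projectID int64) error {
+	// newColumnID == 0 selects the project's default column; passing
+	// projectID == 0 instead would detach the issue from any project.
+	return IssueAssignOrRemoveProject(ctx, issue, doer, projectID, 0)
+}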
diff --git a/models/issues/issue_search.go b/models/issues/issue_search.go
new file mode 100644
index 0000000..e9f116b
--- /dev/null
+++ b/models/issues/issue_search.go
@@ -0,0 +1,489 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/optional"
+
+ "xorm.io/builder"
+ "xorm.io/xorm"
+)
+
+// IssuesOptions represents options for querying issues.
+type IssuesOptions struct { //nolint
+ Paginator *db.ListOptions
+ RepoIDs []int64 // overwrites RepoCond if the length is not 0
+ AllPublic bool // include also all public repositories
+ RepoCond builder.Cond
+ AssigneeID int64
+ PosterID int64
+ MentionedID int64
+ ReviewRequestedID int64
+ ReviewedID int64
+ SubscriberID int64
+ MilestoneIDs []int64
+ ProjectID int64
+ ProjectColumnID int64
+ IsClosed optional.Option[bool]
+ IsPull optional.Option[bool]
+ LabelIDs []int64
+ IncludedLabelNames []string
+ ExcludedLabelNames []string
+ IncludeMilestones []string
+ SortType string
+ IssueIDs []int64
+ UpdatedAfterUnix int64
+ UpdatedBeforeUnix int64
+ // prioritize issues from this repo
+ PriorityRepoID int64
+ IsArchived optional.Option[bool]
+ Org *organization.Organization // issues permission scope
+ Team *organization.Team // issues permission scope
+ User *user_model.User // issues permission scope
+}
+
+// applySorts sorts an issues-related session based on the provided
+// sortType string
+func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
+ switch sortType {
+ case "oldest":
+ sess.Asc("issue.created_unix").Asc("issue.id")
+ case "recentupdate":
+ sess.Desc("issue.updated_unix").Desc("issue.created_unix").Desc("issue.id")
+ case "leastupdate":
+ sess.Asc("issue.updated_unix").Asc("issue.created_unix").Asc("issue.id")
+ case "mostcomment":
+ sess.Desc("issue.num_comments").Desc("issue.created_unix").Desc("issue.id")
+ case "leastcomment":
+ sess.Asc("issue.num_comments").Desc("issue.created_unix").Desc("issue.id")
+ case "priority":
+ sess.Desc("issue.priority").Desc("issue.created_unix").Desc("issue.id")
+ case "nearduedate":
+ // 253370764800 is 01/01/9999 @ 12:00am (UTC)
+ sess.Join("LEFT", "milestone", "issue.milestone_id = milestone.id").
+ OrderBy("CASE " +
+ "WHEN issue.deadline_unix = 0 AND (milestone.deadline_unix = 0 OR milestone.deadline_unix IS NULL) THEN 253370764800 " +
+ "WHEN milestone.deadline_unix = 0 OR milestone.deadline_unix IS NULL THEN issue.deadline_unix " +
+ "WHEN milestone.deadline_unix < issue.deadline_unix OR issue.deadline_unix = 0 THEN milestone.deadline_unix " +
+ "ELSE issue.deadline_unix END ASC").
+ Desc("issue.created_unix").
+ Desc("issue.id")
+ case "farduedate":
+ sess.Join("LEFT", "milestone", "issue.milestone_id = milestone.id").
+ OrderBy("CASE " +
+ "WHEN milestone.deadline_unix IS NULL THEN issue.deadline_unix " +
+ "WHEN milestone.deadline_unix < issue.deadline_unix OR issue.deadline_unix = 0 THEN milestone.deadline_unix " +
+ "ELSE issue.deadline_unix END DESC").
+ Desc("issue.created_unix").
+ Desc("issue.id")
+ case "priorityrepo":
+ sess.OrderBy("CASE "+
+ "WHEN issue.repo_id = ? THEN 1 "+
+ "ELSE 2 END ASC", priorityRepoID).
+ Desc("issue.created_unix").
+ Desc("issue.id")
+ case "project-column-sorting":
+ sess.Asc("project_issue.sorting").Desc("issue.created_unix").Desc("issue.id")
+ default:
+ sess.Desc("issue.created_unix").Desc("issue.id")
+ }
+}
+
+func applyLimit(sess *xorm.Session, opts *IssuesOptions) {
+ if opts.Paginator == nil || opts.Paginator.IsListAll() {
+ return
+ }
+
+ start := 0
+ if opts.Paginator.Page > 1 {
+ start = (opts.Paginator.Page - 1) * opts.Paginator.PageSize
+ }
+ sess.Limit(opts.Paginator.PageSize, start)
+}
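The offset above is plain 1-based page arithmetic. A minimal standalone sketch of just that computation (the offset helper is illustrative, not part of this package):

package main

import "fmt"

// offset mirrors the arithmetic in applyLimit: pages are 1-based, so page 1
// starts at row 0 and page N starts at (N-1)*pageSize.
func offset(page, pageSize int) int {
	if page > 1 {
		return (page - 1) * pageSize
	}
	return 0
}

func main() {
	fmt.Println(offset(1, 50), offset(3, 50)) // prints: 0 100
}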
+
+func applyLabelsCondition(sess *xorm.Session, opts *IssuesOptions) {
+ if len(opts.LabelIDs) > 0 {
+ if opts.LabelIDs[0] == 0 {
+ sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_label)")
+ } else {
+ // deduplicate the label IDs for inclusion and exclusion
+ includedLabelIDs := make(container.Set[int64])
+ excludedLabelIDs := make(container.Set[int64])
+ for _, labelID := range opts.LabelIDs {
+ if labelID > 0 {
+ includedLabelIDs.Add(labelID)
+ } else if labelID < 0 { // 0 is not supported here, so just ignore it
+ excludedLabelIDs.Add(-labelID)
+ }
+ }
+ // ... and use them in a subquery of the form:
+ // where (select count(*) from issue_label where issue_id=issue.id and label_id in (2, 4, 6)) = 3
+ // This equality is guaranteed thanks to unique index (issue_id,label_id) on table issue_label.
+ if len(includedLabelIDs) > 0 {
+ subQuery := builder.Select("count(*)").From("issue_label").Where(builder.Expr("issue_id = issue.id")).
+ And(builder.In("label_id", includedLabelIDs.Values()))
+ sess.Where(builder.Eq{strconv.Itoa(len(includedLabelIDs)): subQuery})
+ }
+ // or (select count(*)...) = 0 for excluded labels
+ if len(excludedLabelIDs) > 0 {
+ subQuery := builder.Select("count(*)").From("issue_label").Where(builder.Expr("issue_id = issue.id")).
+ And(builder.In("label_id", excludedLabelIDs.Values()))
+ sess.Where(builder.Eq{"0": subQuery})
+ }
+ }
+ }
+
+ if len(opts.IncludedLabelNames) > 0 {
+ sess.In("issue.id", BuildLabelNamesIssueIDsCondition(opts.IncludedLabelNames))
+ }
+
+ if len(opts.ExcludedLabelNames) > 0 {
+ sess.And(builder.NotIn("issue.id", BuildLabelNamesIssueIDsCondition(opts.ExcludedLabelNames)))
+ }
+}
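The count-equality trick above is easier to see rendered as SQL. A minimal standalone sketch that builds the same kind of condition with xorm's builder and prints it (the label IDs are made up; only the query shape matters):

package main

import (
	"fmt"
	"strconv"

	"xorm.io/builder"
)

func main() {
	includedLabelIDs := []int64{2, 4, 6}

	// (SELECT count(*) FROM issue_label WHERE issue_id = issue.id AND label_id IN (2,4,6)) = 3
	subQuery := builder.Select("count(*)").From("issue_label").
		Where(builder.Expr("issue_id = issue.id")).
		And(builder.In("label_id", includedLabelIDs))
	cond := builder.Eq{strconv.Itoa(len(includedLabelIDs)): subQuery}

	sql, args, err := builder.ToSQL(cond)
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args)
}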
+
+func applyMilestoneCondition(sess *xorm.Session, opts *IssuesOptions) {
+ if len(opts.MilestoneIDs) == 1 && opts.MilestoneIDs[0] == db.NoConditionID {
+ sess.And("issue.milestone_id = 0")
+ } else if len(opts.MilestoneIDs) > 0 {
+ sess.In("issue.milestone_id", opts.MilestoneIDs)
+ }
+
+ if len(opts.IncludeMilestones) > 0 {
+ sess.In("issue.milestone_id",
+ builder.Select("id").
+ From("milestone").
+ Where(builder.In("name", opts.IncludeMilestones)))
+ }
+}
+
+func applyProjectCondition(sess *xorm.Session, opts *IssuesOptions) {
+ if opts.ProjectID > 0 { // specific project
+ sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id").
+ And("project_issue.project_id=?", opts.ProjectID)
+ } else if opts.ProjectID == db.NoConditionID { // show those that are in no project
+ sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue").And(builder.Neq{"project_id": 0})))
+ }
+ // opts.ProjectID == 0 means all projects;
+ // no condition needs to be applied
+}
+
+func applyProjectColumnCondition(sess *xorm.Session, opts *IssuesOptions) {
+ // opts.ProjectColumnID == 0 means all project columns;
+ // no condition needs to be applied
+ if opts.ProjectColumnID > 0 {
+ sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": opts.ProjectColumnID}))
+ } else if opts.ProjectColumnID == db.NoConditionID {
+ sess.In("issue.id", builder.Select("issue_id").From("project_issue").Where(builder.Eq{"project_board_id": 0}))
+ }
+}
+
+func applyRepoConditions(sess *xorm.Session, opts *IssuesOptions) {
+ if len(opts.RepoIDs) == 1 {
+ opts.RepoCond = builder.Eq{"issue.repo_id": opts.RepoIDs[0]}
+ } else if len(opts.RepoIDs) > 1 {
+ opts.RepoCond = builder.In("issue.repo_id", opts.RepoIDs)
+ }
+ if opts.AllPublic {
+ if opts.RepoCond == nil {
+ opts.RepoCond = builder.NewCond()
+ }
+ opts.RepoCond = opts.RepoCond.Or(builder.In("issue.repo_id", builder.Select("id").From("repository").Where(builder.Eq{"is_private": false})))
+ }
+ if opts.RepoCond != nil {
+ sess.And(opts.RepoCond)
+ }
+}
+
+func applyConditions(sess *xorm.Session, opts *IssuesOptions) {
+ if len(opts.IssueIDs) > 0 {
+ sess.In("issue.id", opts.IssueIDs)
+ }
+
+ applyRepoConditions(sess, opts)
+
+ if opts.IsClosed.Has() {
+ sess.And("issue.is_closed=?", opts.IsClosed.Value())
+ }
+
+ if opts.AssigneeID > 0 {
+ applyAssigneeCondition(sess, opts.AssigneeID)
+ } else if opts.AssigneeID == db.NoConditionID {
+ sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
+ }
+
+ if opts.PosterID > 0 {
+ applyPosterCondition(sess, opts.PosterID)
+ }
+
+ if opts.MentionedID > 0 {
+ applyMentionedCondition(sess, opts.MentionedID)
+ }
+
+ if opts.ReviewRequestedID > 0 {
+ applyReviewRequestedCondition(sess, opts.ReviewRequestedID)
+ }
+
+ if opts.ReviewedID > 0 {
+ applyReviewedCondition(sess, opts.ReviewedID)
+ }
+
+ if opts.SubscriberID > 0 {
+ applySubscribedCondition(sess, opts.SubscriberID)
+ }
+
+ applyMilestoneCondition(sess, opts)
+
+ if opts.UpdatedAfterUnix != 0 {
+ sess.And(builder.Gte{"issue.updated_unix": opts.UpdatedAfterUnix})
+ }
+ if opts.UpdatedBeforeUnix != 0 {
+ sess.And(builder.Lte{"issue.updated_unix": opts.UpdatedBeforeUnix})
+ }
+
+ applyProjectCondition(sess, opts)
+
+ applyProjectColumnCondition(sess, opts)
+
+ if opts.IsPull.Has() {
+ sess.And("issue.is_pull=?", opts.IsPull.Value())
+ }
+
+ if opts.IsArchived.Has() {
+ sess.And(builder.Eq{"repository.is_archived": opts.IsArchived.Value()})
+ }
+
+ applyLabelsCondition(sess, opts)
+
+ if opts.User != nil {
+ sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.User.ID, opts.Org, opts.Team, opts.IsPull.Value()))
+ }
+}
+
+// teamUnitsRepoCond returns a query condition for the repo IDs that the given org team can access with the specified unit types
+func teamUnitsRepoCond(id string, userID, orgID, teamID int64, units ...unit.Type) builder.Cond {
+ return builder.In(id,
+ builder.Select("repo_id").From("team_repo").Where(
+ builder.Eq{
+ "team_id": teamID,
+ }.And(
+ builder.Or(
+ // Check if the user is member of the team.
+ builder.In(
+ "team_id", builder.Select("team_id").From("team_user").Where(
+ builder.Eq{
+ "uid": userID,
+ },
+ ),
+ ),
+ // Check if the user is in the owner team of the organisation.
+ builder.Exists(builder.Select("team_id").From("team_user").
+ Where(builder.Eq{
+ "org_id": orgID,
+ "team_id": builder.Select("id").From("team").Where(
+ builder.Eq{
+ "org_id": orgID,
+ "lower_name": strings.ToLower(organization.OwnerTeamName),
+ }),
+ "uid": userID,
+ }),
+ ),
+ )).And(
+ builder.In(
+ "team_id", builder.Select("team_id").From("team_unit").Where(
+ builder.Eq{
+ "`team_unit`.org_id": orgID,
+ }.And(
+ builder.In("`team_unit`.type", units),
+ ),
+ ),
+ ),
+ ),
+ ))
+}
+
+// issuePullAccessibleRepoCond builds the accessible-repo condition; userID must not be zero, and the condition requires the repository table to be joined
+func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organization.Organization, team *organization.Team, isPull bool) builder.Cond {
+ cond := builder.NewCond()
+ unitType := unit.TypeIssues
+ if isPull {
+ unitType = unit.TypePullRequests
+ }
+ if org != nil {
+ if team != nil {
+ cond = cond.And(teamUnitsRepoCond(repoIDstr, userID, org.ID, team.ID, unitType)) // special team member repos
+ } else {
+ cond = cond.And(
+ builder.Or(
+ repo_model.UserOrgUnitRepoCond(repoIDstr, userID, org.ID, unitType), // team member repos
+ repo_model.UserOrgPublicUnitRepoCond(userID, org.ID), // user org public non-member repos, TODO: check repo has issues
+ ),
+ )
+ }
+ } else {
+ cond = cond.And(
+ builder.Or(
+ repo_model.UserOwnedRepoCond(userID), // owned repos
+ repo_model.UserAccessRepoCond(repoIDstr, userID), // user can access repo in a unit independent way
+ repo_model.UserAssignedRepoCond(repoIDstr, userID), // user has been assigned accessible public repos
+ repo_model.UserMentionedRepoCond(repoIDstr, userID), // user has been mentioned accessible public repos
+ repo_model.UserCreateIssueRepoCond(repoIDstr, userID, isPull), // user has created issue/pr accessible public repos
+ ),
+ )
+ }
+ return cond
+}
+
+func applyAssigneeCondition(sess *xorm.Session, assigneeID int64) {
+ sess.Join("INNER", "issue_assignees", "issue.id = issue_assignees.issue_id").
+ And("issue_assignees.assignee_id = ?", assigneeID)
+}
+
+func applyPosterCondition(sess *xorm.Session, posterID int64) {
+ sess.And("issue.poster_id=?", posterID)
+}
+
+func applyMentionedCondition(sess *xorm.Session, mentionedID int64) {
+ sess.Join("INNER", "issue_user", "issue.id = issue_user.issue_id").
+ And("issue_user.is_mentioned = ?", true).
+ And("issue_user.uid = ?", mentionedID)
+}
+
+func applyReviewRequestedCondition(sess *xorm.Session, reviewRequestedID int64) {
+ existInTeamQuery := builder.Select("team_user.team_id").
+ From("team_user").
+ Where(builder.Eq{"team_user.uid": reviewRequestedID})
+
+ // if the review is approved or rejected, it should not be shown in the review requested list
+ maxReview := builder.Select("MAX(r.id)").
+ From("review as r").
+ Where(builder.In("r.type", []ReviewType{ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest})).
+ GroupBy("r.issue_id, r.reviewer_id, r.reviewer_team_id")
+
+ subQuery := builder.Select("review.issue_id").
+ From("review").
+ Where(builder.And(
+ builder.Eq{"review.type": ReviewTypeRequest},
+ builder.Or(
+ builder.Eq{"review.reviewer_id": reviewRequestedID},
+ builder.In("review.reviewer_team_id", existInTeamQuery),
+ ),
+ builder.In("review.id", maxReview),
+ ))
+ sess.Where("issue.poster_id <> ?", reviewRequestedID).
+ And(builder.In("issue.id", subQuery))
+}
+
+func applyReviewedCondition(sess *xorm.Session, reviewedID int64) {
+ // Query for pull requests where you are a reviewer or commenter, excluding
+ // any pull requests already returned by the review requested filter.
+ notPoster := builder.Neq{"issue.poster_id": reviewedID}
+ reviewed := builder.In("issue.id", builder.
+ Select("issue_id").
+ From("review").
+ Where(builder.And(
+ builder.Neq{"type": ReviewTypeRequest},
+ builder.Or(
+ builder.Eq{"reviewer_id": reviewedID},
+ builder.In("reviewer_team_id", builder.
+ Select("team_id").
+ From("team_user").
+ Where(builder.Eq{"uid": reviewedID}),
+ ),
+ ),
+ )),
+ )
+ commented := builder.In("issue.id", builder.
+ Select("issue_id").
+ From("comment").
+ Where(builder.And(
+ builder.Eq{"poster_id": reviewedID},
+ builder.In("type", CommentTypeComment, CommentTypeCode, CommentTypeReview),
+ )),
+ )
+ sess.And(notPoster, builder.Or(reviewed, commented))
+}
+
+func applySubscribedCondition(sess *xorm.Session, subscriberID int64) {
+ sess.And(
+ builder.
+ NotIn("issue.id",
+ builder.Select("issue_id").
+ From("issue_watch").
+ Where(builder.Eq{"is_watching": false, "user_id": subscriberID}),
+ ),
+ ).And(
+ builder.Or(
+ builder.In("issue.id", builder.
+ Select("issue_id").
+ From("issue_watch").
+ Where(builder.Eq{"is_watching": true, "user_id": subscriberID}),
+ ),
+ builder.In("issue.id", builder.
+ Select("issue_id").
+ From("comment").
+ Where(builder.Eq{"poster_id": subscriberID}),
+ ),
+ builder.Eq{"issue.poster_id": subscriberID},
+ builder.In("issue.repo_id", builder.
+ Select("id").
+ From("watch").
+ Where(builder.And(builder.Eq{"user_id": subscriberID},
+ builder.In("mode", repo_model.WatchModeNormal, repo_model.WatchModeAuto))),
+ ),
+ ),
+ )
+}
+
+// Issues returns a list of issues by given conditions.
+func Issues(ctx context.Context, opts *IssuesOptions) (IssueList, error) {
+ sess := db.GetEngine(ctx).
+ Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
+ applyLimit(sess, opts)
+ applyConditions(sess, opts)
+ applySorts(sess, opts.SortType, opts.PriorityRepoID)
+
+ issues := IssueList{}
+ if err := sess.Find(&issues); err != nil {
+ return nil, fmt.Errorf("unable to query Issues: %w", err)
+ }
+
+ if err := issues.LoadAttributes(ctx); err != nil {
+ return nil, fmt.Errorf("unable to LoadAttributes for Issues: %w", err)
+ }
+
+ return issues, nil
+}
+
+// IssueIDs returns a list of issue ids by given conditions.
+func IssueIDs(ctx context.Context, opts *IssuesOptions, otherConds ...builder.Cond) ([]int64, int64, error) {
+ sess := db.GetEngine(ctx).
+ Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
+ applyConditions(sess, opts)
+ for _, cond := range otherConds {
+ sess.And(cond)
+ }
+
+ applyLimit(sess, opts)
+ applySorts(sess, opts.SortType, opts.PriorityRepoID)
+
+ var res []int64
+ total, err := sess.Select("`issue`.id").Table(&Issue{}).FindAndCount(&res)
+ if err != nil {
+ return nil, 0, err
+ }
+
+ return res, total, nil
+}
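A minimal usage sketch of the search entry point defined above, assuming an initialized database engine as in the tests later in this patch (the option values are placeholders):

package main

import (
	"fmt"

	"code.gitea.io/gitea/models/db"
	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/modules/optional"
)

func main() {
	// Open issues assigned to user 1 in repos 1 and 3, first page of 20,
	// using the default "newest first" sort.
	opts := &issues_model.IssuesOptions{
		RepoIDs:    []int64{1, 3},
		AssigneeID: 1,
		IsClosed:   optional.Some(false),
		Paginator:  &db.ListOptions{Page: 1, PageSize: 20},
	}
	issues, err := issues_model.Issues(db.DefaultContext, opts)
	if err != nil {
		panic(err)
	}
	fmt.Println("matched issues:", len(issues))
}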
diff --git a/models/issues/issue_stats.go b/models/issues/issue_stats.go
new file mode 100644
index 0000000..dc634cf
--- /dev/null
+++ b/models/issues/issue_stats.go
@@ -0,0 +1,191 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+
+ "xorm.io/builder"
+ "xorm.io/xorm"
+)
+
+// IssueStats represents issue statistic information.
+type IssueStats struct {
+ OpenCount, ClosedCount int64
+ YourRepositoriesCount int64
+ AssignCount int64
+ CreateCount int64
+ MentionCount int64
+ ReviewRequestedCount int64
+ ReviewedCount int64
+}
+
+// Filter modes.
+const (
+ FilterModeAll = iota
+ FilterModeAssign
+ FilterModeCreate
+ FilterModeMention
+ FilterModeReviewRequested
+ FilterModeReviewed
+ FilterModeYourRepositories
+)
+
+const (
+ // MaxQueryParameters represents the max query parameters
+ // When queries are broken down in parts because of the number
+ // of parameters, attempt to break by this amount
+ MaxQueryParameters = 300
+)
+
+// CountIssuesByRepo returns a map from repo ID to the number of issues matching the options
+func CountIssuesByRepo(ctx context.Context, opts *IssuesOptions) (map[int64]int64, error) {
+ sess := db.GetEngine(ctx).
+ Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
+
+ applyConditions(sess, opts)
+
+ countsSlice := make([]*struct {
+ RepoID int64
+ Count int64
+ }, 0, 10)
+ if err := sess.GroupBy("issue.repo_id").
+ Select("issue.repo_id AS repo_id, COUNT(*) AS count").
+ Table("issue").
+ Find(&countsSlice); err != nil {
+ return nil, fmt.Errorf("unable to CountIssuesByRepo: %w", err)
+ }
+
+ countMap := make(map[int64]int64, len(countsSlice))
+ for _, c := range countsSlice {
+ countMap[c.RepoID] = c.Count
+ }
+ return countMap, nil
+}
+
+// CountIssues returns the number of issues matching the given conditions.
+func CountIssues(ctx context.Context, opts *IssuesOptions, otherConds ...builder.Cond) (int64, error) {
+ sess := db.GetEngine(ctx).
+ Select("COUNT(issue.id) AS count").
+ Table("issue").
+ Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
+ applyConditions(sess, opts)
+
+ for _, cond := range otherConds {
+ sess.And(cond)
+ }
+
+ return sess.Count()
+}
+
+// GetIssueStats returns issue statistic information by given conditions.
+func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error) {
+ if len(opts.IssueIDs) <= MaxQueryParameters {
+ return getIssueStatsChunk(ctx, opts, opts.IssueIDs)
+ }
+
+ // If too long a list of IDs is provided, we get the statistics in
+ // smaller chunks and accumulate them. Note: this could potentially
+ // produce invalid results. The alternative is to insert the list of
+ // IDs into a temporary table and join from it.
+ accum := &IssueStats{}
+ for i := 0; i < len(opts.IssueIDs); {
+ chunk := i + MaxQueryParameters
+ if chunk > len(opts.IssueIDs) {
+ chunk = len(opts.IssueIDs)
+ }
+ stats, err := getIssueStatsChunk(ctx, opts, opts.IssueIDs[i:chunk])
+ if err != nil {
+ return nil, err
+ }
+ accum.OpenCount += stats.OpenCount
+ accum.ClosedCount += stats.ClosedCount
+ accum.YourRepositoriesCount += stats.YourRepositoriesCount
+ accum.AssignCount += stats.AssignCount
+ accum.CreateCount += stats.CreateCount
+ accum.MentionCount += stats.MentionCount
+ accum.ReviewRequestedCount += stats.ReviewRequestedCount
+ accum.ReviewedCount += stats.ReviewedCount
+ i = chunk
+ }
+ return accum, nil
+}
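The loop above walks the ID list in windows of at most MaxQueryParameters entries. A small standalone sketch of just that windowing (chunkBounds is a made-up helper):

package main

import "fmt"

// chunkBounds returns the [start, end) windows the chunked walk visits for a
// list of n IDs, given a maximum number of query parameters per chunk.
func chunkBounds(n, maxParams int) [][2]int {
	var bounds [][2]int
	for i := 0; i < n; {
		end := i + maxParams
		if end > n {
			end = n
		}
		bounds = append(bounds, [2]int{i, end})
		i = end
	}
	return bounds
}

func main() {
	// 310 IDs with a 300-parameter limit -> [[0 300] [300 310]]
	fmt.Println(chunkBounds(310, 300))
}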
+
+func getIssueStatsChunk(ctx context.Context, opts *IssuesOptions, issueIDs []int64) (*IssueStats, error) {
+ stats := &IssueStats{}
+
+ sess := db.GetEngine(ctx).
+ Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
+
+ var err error
+ stats.OpenCount, err = applyIssuesOptions(sess, opts, issueIDs).
+ And("issue.is_closed = ?", false).
+ Count(new(Issue))
+ if err != nil {
+ return stats, err
+ }
+ stats.ClosedCount, err = applyIssuesOptions(sess, opts, issueIDs).
+ And("issue.is_closed = ?", true).
+ Count(new(Issue))
+ return stats, err
+}
+
+func applyIssuesOptions(sess *xorm.Session, opts *IssuesOptions, issueIDs []int64) *xorm.Session {
+ if len(opts.RepoIDs) > 1 {
+ sess.In("issue.repo_id", opts.RepoIDs)
+ } else if len(opts.RepoIDs) == 1 {
+ sess.And("issue.repo_id = ?", opts.RepoIDs[0])
+ }
+
+ if len(issueIDs) > 0 {
+ sess.In("issue.id", issueIDs)
+ }
+
+ applyLabelsCondition(sess, opts)
+
+ applyMilestoneCondition(sess, opts)
+
+ applyProjectCondition(sess, opts)
+
+ if opts.AssigneeID > 0 {
+ applyAssigneeCondition(sess, opts.AssigneeID)
+ } else if opts.AssigneeID == db.NoConditionID {
+ sess.Where("issue.id NOT IN (SELECT issue_id FROM issue_assignees)")
+ }
+
+ if opts.PosterID > 0 {
+ applyPosterCondition(sess, opts.PosterID)
+ }
+
+ if opts.MentionedID > 0 {
+ applyMentionedCondition(sess, opts.MentionedID)
+ }
+
+ if opts.ReviewRequestedID > 0 {
+ applyReviewRequestedCondition(sess, opts.ReviewRequestedID)
+ }
+
+ if opts.ReviewedID > 0 {
+ applyReviewedCondition(sess, opts.ReviewedID)
+ }
+
+ if opts.IsPull.Has() {
+ sess.And("issue.is_pull=?", opts.IsPull.Value())
+ }
+
+ return sess
+}
+
+// CountOrphanedIssues counts issues whose repository no longer exists
+func CountOrphanedIssues(ctx context.Context) (int64, error) {
+ return db.GetEngine(ctx).
+ Table("issue").
+ Join("LEFT", "repository", "issue.repo_id=repository.id").
+ Where(builder.IsNull{"repository.id"}).
+ Select("COUNT(`issue`.`id`)").
+ Count()
+}
diff --git a/models/issues/issue_stats_test.go b/models/issues/issue_stats_test.go
new file mode 100644
index 0000000..fda75a6
--- /dev/null
+++ b/models/issues/issue_stats_test.go
@@ -0,0 +1,34 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetIssueStats(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ ids, err := issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ stats, err := issues_model.GetIssueStats(db.DefaultContext, &issues_model.IssuesOptions{IssueIDs: ids})
+ require.NoError(t, err)
+
+ assert.Equal(t, int64(4), stats.OpenCount)
+ assert.Equal(t, int64(1), stats.ClosedCount)
+ assert.Equal(t, int64(0), stats.YourRepositoriesCount)
+ assert.Equal(t, int64(0), stats.AssignCount)
+ assert.Equal(t, int64(0), stats.CreateCount)
+ assert.Equal(t, int64(0), stats.MentionCount)
+ assert.Equal(t, int64(0), stats.ReviewRequestedCount)
+ assert.Equal(t, int64(0), stats.ReviewedCount)
+}
diff --git a/models/issues/issue_test.go b/models/issues/issue_test.go
new file mode 100644
index 0000000..580be96
--- /dev/null
+++ b/models/issues/issue_test.go
@@ -0,0 +1,498 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "context"
+ "fmt"
+ "sort"
+ "sync"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "xorm.io/builder"
+)
+
+func TestIssue_ReplaceLabels(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testSuccess := func(issueID int64, labelIDs, expectedLabelIDs []int64) {
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issueID})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+ labels := make([]*issues_model.Label, len(labelIDs))
+ for i, labelID := range labelIDs {
+ labels[i] = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID, RepoID: repo.ID})
+ }
+ require.NoError(t, issues_model.ReplaceIssueLabels(db.DefaultContext, issue, labels, doer))
+ unittest.AssertCount(t, &issues_model.IssueLabel{IssueID: issueID}, len(expectedLabelIDs))
+ for _, labelID := range expectedLabelIDs {
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issueID, LabelID: labelID})
+ }
+ }
+
+ testSuccess(1, []int64{2}, []int64{2})
+ testSuccess(1, []int64{1, 2}, []int64{1, 2})
+ testSuccess(1, []int64{}, []int64{})
+
+ // mutually exclusive scoped labels 7 and 8
+ testSuccess(18, []int64{6, 7}, []int64{6, 7})
+ testSuccess(18, []int64{7, 8}, []int64{8})
+ testSuccess(18, []int64{6, 8, 7}, []int64{6, 7})
+}
+
+func Test_GetIssueIDsByRepoID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ ids, err := issues_model.GetIssueIDsByRepoID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.Len(t, ids, 5)
+}
+
+func TestIssueAPIURL(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ err := issue.LoadAttributes(db.DefaultContext)
+
+ require.NoError(t, err)
+ assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/issues/1", issue.APIURL(db.DefaultContext))
+}
+
+func TestGetIssuesByIDs(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ testSuccess := func(expectedIssueIDs, nonExistentIssueIDs []int64) {
+ issues, err := issues_model.GetIssuesByIDs(db.DefaultContext, append(expectedIssueIDs, nonExistentIssueIDs...), true)
+ require.NoError(t, err)
+ actualIssueIDs := make([]int64, len(issues))
+ for i, issue := range issues {
+ actualIssueIDs[i] = issue.ID
+ }
+ assert.Equal(t, expectedIssueIDs, actualIssueIDs)
+ }
+ testSuccess([]int64{1, 2, 3}, []int64{})
+ testSuccess([]int64{1, 2, 3}, []int64{unittest.NonexistentID})
+ testSuccess([]int64{3, 2, 1}, []int64{})
+}
+
+func TestGetParticipantIDsByIssue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ checkParticipants := func(issueID int64, userIDs []int) {
+ issue, err := issues_model.GetIssueByID(db.DefaultContext, issueID)
+ require.NoError(t, err)
+
+ participants, err := issue.GetParticipantIDsByIssue(db.DefaultContext)
+ require.NoError(t, err)
+
+ participantsIDs := make([]int, len(participants))
+ for i, uid := range participants {
+ participantsIDs[i] = int(uid)
+ }
+ sort.Ints(participantsIDs)
+ sort.Ints(userIDs)
+ assert.Equal(t, userIDs, participantsIDs)
+ }
+
+ // User 1 is issue1 poster (see fixtures/issue.yml)
+ // User 2 only labeled issue1 (see fixtures/comment.yml)
+ // Users 3 and 5 made actual comments (see fixtures/comment.yml)
+ // User 3 is inactive, thus not an active participant
+ checkParticipants(1, []int{1, 5})
+}
+
+func TestIssue_ClearLabels(t *testing.T) {
+ tests := []struct {
+ issueID int64
+ doerID int64
+ }{
+ {1, 2}, // non-pull-request, has labels
+ {2, 2}, // pull-request, has labels
+ {3, 2}, // pull-request, has no labels
+ }
+ for _, test := range tests {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: test.issueID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: test.doerID})
+ require.NoError(t, issues_model.ClearIssueLabels(db.DefaultContext, issue, doer))
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: test.issueID})
+ }
+}
+
+func TestUpdateIssueCols(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{})
+
+ const newTitle = "New Title for unit test"
+ issue.Title = newTitle
+
+ prevContent := issue.Content
+ issue.Content = "This should have no effect"
+
+ now := time.Now().Unix()
+ require.NoError(t, issues_model.UpdateIssueCols(db.DefaultContext, issue, "name"))
+ then := time.Now().Unix()
+
+ updatedIssue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue.ID})
+ assert.EqualValues(t, newTitle, updatedIssue.Title)
+ assert.EqualValues(t, prevContent, updatedIssue.Content)
+ unittest.AssertInt64InRange(t, now, then, int64(updatedIssue.UpdatedUnix))
+}
+
+func TestIssues(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ for _, test := range []struct {
+ Opts issues_model.IssuesOptions
+ ExpectedIssueIDs []int64
+ }{
+ {
+ issues_model.IssuesOptions{
+ AssigneeID: 1,
+ SortType: "oldest",
+ },
+ []int64{1, 6},
+ },
+ {
+ issues_model.IssuesOptions{
+ RepoCond: builder.In("repo_id", 1, 3),
+ SortType: "oldest",
+ Paginator: &db.ListOptions{
+ Page: 1,
+ PageSize: 4,
+ },
+ },
+ []int64{1, 2, 3, 5},
+ },
+ {
+ issues_model.IssuesOptions{
+ LabelIDs: []int64{1},
+ Paginator: &db.ListOptions{
+ Page: 1,
+ PageSize: 4,
+ },
+ },
+ []int64{2, 1},
+ },
+ {
+ issues_model.IssuesOptions{
+ LabelIDs: []int64{1, 2},
+ Paginator: &db.ListOptions{
+ Page: 1,
+ PageSize: 4,
+ },
+ },
+ []int64{}, // issues with **both** label 1 and 2, none of these issues matches, TODO: add more tests
+ },
+ {
+ issues_model.IssuesOptions{
+ LabelIDs: []int64{-1, 2},
+ },
+ []int64{5}, // issue without label 1 but with label 2.
+ },
+ {
+ issues_model.IssuesOptions{
+ RepoCond: builder.In("repo_id", 1),
+ LabelIDs: []int64{0},
+ },
+ []int64{11, 3}, // issues without any label (ordered by creation date desc.)(note: 11 is a pull request)
+ },
+ {
+ issues_model.IssuesOptions{
+ MilestoneIDs: []int64{1},
+ },
+ []int64{2},
+ },
+ } {
+ issues, err := issues_model.Issues(db.DefaultContext, &test.Opts)
+ require.NoError(t, err)
+ if assert.Len(t, issues, len(test.ExpectedIssueIDs)) {
+ for i, issue := range issues {
+ assert.EqualValues(t, test.ExpectedIssueIDs[i], issue.ID)
+ }
+ }
+ }
+}
+
+func TestIssue_loadTotalTimes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ ms, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+ require.NoError(t, ms.LoadTotalTimes(db.DefaultContext))
+ assert.Equal(t, int64(3682), ms.TotalTrackedTime)
+}
+
+func testInsertIssue(t *testing.T, title, content string, expectIndex int64) *issues_model.Issue {
+ var newIssue issues_model.Issue
+ t.Run(title, func(t *testing.T) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ issue := issues_model.Issue{
+ RepoID: repo.ID,
+ PosterID: user.ID,
+ Poster: user,
+ Title: title,
+ Content: content,
+ }
+ err := issues_model.NewIssue(db.DefaultContext, repo, &issue, nil, nil)
+ require.NoError(t, err)
+
+ has, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Get(&newIssue)
+ require.NoError(t, err)
+ assert.True(t, has)
+ assert.EqualValues(t, issue.Title, newIssue.Title)
+ assert.EqualValues(t, issue.Content, newIssue.Content)
+ if expectIndex > 0 {
+ assert.EqualValues(t, expectIndex, newIssue.Index)
+ }
+ })
+ return &newIssue
+}
+
+func TestIssue_InsertIssue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // there are 5 issues and the max index is 5 on repository 1, so this one should be 6
+ issue := testInsertIssue(t, "my issue1", "special issue's comments?", 6)
+ _, err := db.DeleteByID[issues_model.Issue](db.DefaultContext, issue.ID)
+ require.NoError(t, err)
+
+ issue = testInsertIssue(t, `my issue2, this is my son's love \n \r \ `, "special issue's '' comments?", 7)
+ _, err = db.DeleteByID[issues_model.Issue](db.DefaultContext, issue.ID)
+ require.NoError(t, err)
+}
+
+func TestIssue_ResolveMentions(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ testSuccess := func(owner, repo, doer string, mentions []string, expected []int64) {
+ o := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: owner})
+ r := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: o.ID, LowerName: repo})
+ issue := &issues_model.Issue{RepoID: r.ID}
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: doer})
+ resolved, err := issues_model.ResolveIssueMentionsByVisibility(db.DefaultContext, issue, d, mentions)
+ require.NoError(t, err)
+ ids := make([]int64, len(resolved))
+ for i, user := range resolved {
+ ids[i] = user.ID
+ }
+ sort.Slice(ids, func(i, j int) bool { return ids[i] < ids[j] })
+ assert.EqualValues(t, expected, ids)
+ }
+
+ // Public repo, existing user
+ testSuccess("user2", "repo1", "user1", []string{"user5"}, []int64{5})
+ // Public repo, non-existing user
+ testSuccess("user2", "repo1", "user1", []string{"nonexisting"}, []int64{})
+ // Public repo, doer
+ testSuccess("user2", "repo1", "user1", []string{"user1"}, []int64{})
+ // Public repo, blocked user
+ testSuccess("user2", "repo1", "user1", []string{"user4"}, []int64{})
+ // Private repo, team member
+ testSuccess("org17", "big_test_private_4", "user20", []string{"user2"}, []int64{2})
+ // Private repo, not a team member
+ testSuccess("org17", "big_test_private_4", "user20", []string{"user5"}, []int64{})
+ // Private repo, whole team
+ testSuccess("org17", "big_test_private_4", "user15", []string{"org17/owners"}, []int64{18})
+}
+
+func TestResourceIndex(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ beforeCount, err := issues_model.CountIssues(context.Background(), &issues_model.IssuesOptions{})
+ require.NoError(t, err)
+
+ var wg sync.WaitGroup
+ for i := 0; i < 100; i++ {
+ wg.Add(1)
+ t.Run(fmt.Sprintf("issue %d", i+1), func(t *testing.T) {
+ t.Parallel()
+ testInsertIssue(t, fmt.Sprintf("issue %d", i+1), "my issue", 0)
+ wg.Done()
+ })
+ }
+
+ t.Run("Check the count", func(t *testing.T) {
+ t.Parallel()
+
+ wg.Wait()
+ afterCount, err := issues_model.CountIssues(context.Background(), &issues_model.IssuesOptions{})
+ require.NoError(t, err)
+ assert.EqualValues(t, 100, afterCount-beforeCount)
+ })
+}
+
+func TestCorrectIssueStats(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // To exercise the chunked database look-ups, we need more issues than
+ // `MaxQueryParameters`, so we insert MaxQueryParameters + 10 issues
+ // into the test database. Each new issue gets the constant description
+ // "Bugs are nasty", which is used later on.
+
+ issueAmount := issues_model.MaxQueryParameters + 10
+
+ var wg sync.WaitGroup
+ for i := 0; i < issueAmount; i++ {
+ wg.Add(1)
+ go func(i int) {
+ testInsertIssue(t, fmt.Sprintf("Issue %d", i+1), "Bugs are nasty", 0)
+ wg.Done()
+ }(i)
+ }
+ wg.Wait()
+
+ // Now we will get all issueID's that match the "Bugs are nasty" query.
+ issues, err := issues_model.Issues(context.TODO(), &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{
+ PageSize: issueAmount,
+ },
+ RepoIDs: []int64{1},
+ })
+ total := int64(len(issues))
+ var ids []int64
+ for _, issue := range issues {
+ if issue.Content == "Bugs are nasty" {
+ ids = append(ids, issue.ID)
+ }
+ }
+
+ // Just to be sure.
+ require.NoError(t, err)
+ assert.EqualValues(t, issueAmount, total)
+
+ // Now we will call the GetIssueStats with these IDs and if working,
+ // get the correct stats back.
+ issueStats, err := issues_model.GetIssueStats(db.DefaultContext, &issues_model.IssuesOptions{
+ RepoIDs: []int64{1},
+ IssueIDs: ids,
+ })
+
+ // Now check the values.
+ require.NoError(t, err)
+ assert.EqualValues(t, issueStats.OpenCount, issueAmount)
+}
+
+func TestMilestoneList_LoadTotalTrackedTimes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ miles := issues_model.MilestoneList{
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}),
+ }
+
+ require.NoError(t, miles.LoadTotalTrackedTimes(db.DefaultContext))
+
+ assert.Equal(t, int64(3682), miles[0].TotalTrackedTime)
+}
+
+func TestLoadTotalTrackedTime(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
+
+ require.NoError(t, milestone.LoadTotalTrackedTime(db.DefaultContext))
+
+ assert.Equal(t, int64(3682), milestone.TotalTrackedTime)
+}
+
+func TestCountIssues(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ count, err := issues_model.CountIssues(db.DefaultContext, &issues_model.IssuesOptions{})
+ require.NoError(t, err)
+ assert.EqualValues(t, 22, count)
+}
+
+func TestIssueLoadAttributes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ setting.Service.EnableTimetracking = true
+
+ issueList := issues_model.IssueList{
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1}),
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 4}),
+ }
+
+ for _, issue := range issueList {
+ require.NoError(t, issue.LoadAttributes(db.DefaultContext))
+ assert.EqualValues(t, issue.RepoID, issue.Repo.ID)
+ for _, label := range issue.Labels {
+ assert.EqualValues(t, issue.RepoID, label.RepoID)
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID})
+ }
+ if issue.PosterID > 0 {
+ assert.EqualValues(t, issue.PosterID, issue.Poster.ID)
+ }
+ if issue.AssigneeID > 0 {
+ assert.EqualValues(t, issue.AssigneeID, issue.Assignee.ID)
+ }
+ if issue.MilestoneID > 0 {
+ assert.EqualValues(t, issue.MilestoneID, issue.Milestone.ID)
+ }
+ if issue.IsPull {
+ assert.EqualValues(t, issue.ID, issue.PullRequest.IssueID)
+ }
+ for _, attachment := range issue.Attachments {
+ assert.EqualValues(t, issue.ID, attachment.IssueID)
+ }
+ for _, comment := range issue.Comments {
+ assert.EqualValues(t, issue.ID, comment.IssueID)
+ }
+ if issue.ID == int64(1) {
+ assert.Equal(t, int64(400), issue.TotalTrackedTime)
+ assert.NotNil(t, issue.Project)
+ assert.Equal(t, int64(1), issue.Project.ID)
+ } else {
+ assert.Nil(t, issue.Project)
+ }
+ }
+}
+
+func assertCreateIssues(t *testing.T, isPull bool) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ reponame := "repo1"
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame})
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
+ assert.EqualValues(t, 1, milestone.ID)
+ reaction := &issues_model.Reaction{
+ Type: "heart",
+ UserID: owner.ID,
+ }
+
+ title := "issuetitle1"
+ is := &issues_model.Issue{
+ RepoID: repo.ID,
+ MilestoneID: milestone.ID,
+ Repo: repo,
+ Title: title,
+ Content: "issuecontent1",
+ IsPull: isPull,
+ PosterID: owner.ID,
+ Poster: owner,
+ IsClosed: true,
+ Labels: []*issues_model.Label{label},
+ Reactions: []*issues_model.Reaction{reaction},
+ }
+ err := issues_model.InsertIssues(db.DefaultContext, is)
+ require.NoError(t, err)
+
+ i := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{Title: title})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Reaction{Type: "heart", UserID: owner.ID, IssueID: i.ID})
+}
+
+func TestMigrate_CreateIssuesIsPullFalse(t *testing.T) {
+ assertCreateIssues(t, false)
+}
+
+func TestMigrate_CreateIssuesIsPullTrue(t *testing.T) {
+ assertCreateIssues(t, true)
+}
diff --git a/models/issues/issue_update.go b/models/issues/issue_update.go
new file mode 100644
index 0000000..dbfd2fc
--- /dev/null
+++ b/models/issues/issue_update.go
@@ -0,0 +1,795 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/references"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "xorm.io/builder"
+)
+
+func UpdateIssueCols(ctx context.Context, issue *Issue, cols ...string) error {
+ _, err := UpdateIssueColsWithCond(ctx, issue, builder.NewCond(), cols...)
+ return err
+}
+
+func UpdateIssueColsWithCond(ctx context.Context, issue *Issue, cond builder.Cond, cols ...string) (int64, error) {
+ sess := db.GetEngine(ctx).ID(issue.ID)
+ if issue.NoAutoTime {
+ cols = append(cols, []string{"updated_unix"}...)
+ sess.NoAutoTime()
+ }
+ return sess.Cols(cols...).Where(cond).Update(issue)
+}
+
+func changeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isClosed, isMergePull bool) (*Comment, error) {
+ // Reload the issue
+ currentIssue, err := GetIssueByID(ctx, issue.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ // Nothing should be performed if the current status is the same as the target status
+ if currentIssue.IsClosed == isClosed {
+ if !issue.IsPull {
+ return nil, ErrIssueWasClosed{
+ ID: issue.ID,
+ }
+ }
+ return nil, ErrPullWasClosed{
+ ID: issue.ID,
+ }
+ }
+
+ issue.IsClosed = isClosed
+ return doChangeIssueStatus(ctx, issue, doer, isMergePull)
+}
+
+func doChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isMergePull bool) (*Comment, error) {
+ // Check for open dependencies
+ if issue.IsClosed && issue.Repo.IsDependenciesEnabled(ctx) {
+ // only check if dependencies are enabled and we're about to close an issue, otherwise reopening an issue would fail when there are unsatisfied dependencies
+ noDeps, err := IssueNoDependenciesLeft(ctx, issue)
+ if err != nil {
+ return nil, err
+ }
+
+ if !noDeps {
+ return nil, ErrDependenciesLeft{issue.ID}
+ }
+ }
+
+ if issue.IsClosed {
+ if issue.NoAutoTime {
+ issue.ClosedUnix = issue.UpdatedUnix
+ } else {
+ issue.ClosedUnix = timeutil.TimeStampNow()
+ }
+ } else {
+ issue.ClosedUnix = 0
+ }
+
+ if err := UpdateIssueCols(ctx, issue, "is_closed", "closed_unix"); err != nil {
+ return nil, err
+ }
+
+ // Update issue count of labels
+ if err := issue.LoadLabels(ctx); err != nil {
+ return nil, err
+ }
+ for idx := range issue.Labels {
+ if err := updateLabelCols(ctx, issue.Labels[idx], "num_issues", "num_closed_issue"); err != nil {
+ return nil, err
+ }
+ }
+
+ // Update issue count of milestone
+ if issue.MilestoneID > 0 {
+ if issue.NoAutoTime {
+ if err := UpdateMilestoneCountersWithDate(ctx, issue.MilestoneID, issue.UpdatedUnix); err != nil {
+ return nil, err
+ }
+ } else {
+ if err := UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ // update repository's issue closed number
+ if err := repo_model.UpdateRepoIssueNumbers(ctx, issue.RepoID, issue.IsPull, true); err != nil {
+ return nil, err
+ }
+
+ // New action comment
+ cmtType := CommentTypeClose
+ if !issue.IsClosed {
+ cmtType = CommentTypeReopen
+ } else if isMergePull {
+ cmtType = CommentTypeMergePull
+ }
+
+ return CreateComment(ctx, &CreateCommentOptions{
+ Type: cmtType,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ })
+}
+
+// ChangeIssueStatus changes issue status to open or closed.
+func ChangeIssueStatus(ctx context.Context, issue *Issue, doer *user_model.User, isClosed bool) (*Comment, error) {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+ if err := issue.LoadPoster(ctx); err != nil {
+ return nil, err
+ }
+
+ return changeIssueStatus(ctx, issue, doer, isClosed, false)
+}
+
+// ChangeIssueTitle changes the title of this issue, as the given user.
+func ChangeIssueTitle(ctx context.Context, issue *Issue, doer *user_model.User, oldTitle string) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = UpdateIssueCols(ctx, issue, "name"); err != nil {
+ return fmt.Errorf("updateIssueCols: %w", err)
+ }
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return fmt.Errorf("loadRepo: %w", err)
+ }
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeChangeTitle,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldTitle: oldTitle,
+ NewTitle: issue.Title,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return fmt.Errorf("createComment: %w", err)
+ }
+ if err = issue.AddCrossReferences(ctx, doer, true); err != nil {
+ return fmt.Errorf("addCrossReferences: %w", err)
+ }
+
+ return committer.Commit()
+}
+
+// ChangeIssueRef changes the branch of this issue, as the given user.
+func ChangeIssueRef(ctx context.Context, issue *Issue, doer *user_model.User, oldRef string) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err = UpdateIssueCols(ctx, issue, "ref"); err != nil {
+ return fmt.Errorf("updateIssueCols: %w", err)
+ }
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ return fmt.Errorf("loadRepo: %w", err)
+ }
+ oldRefFriendly := strings.TrimPrefix(oldRef, git.BranchPrefix)
+ newRefFriendly := strings.TrimPrefix(issue.Ref, git.BranchPrefix)
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeChangeIssueRef,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ OldRef: oldRefFriendly,
+ NewRef: newRefFriendly,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return fmt.Errorf("createComment: %w", err)
+ }
+
+ return committer.Commit()
+}
+
+// AddDeletePRBranchComment adds a "delete branch" comment for a pull request issue
+func AddDeletePRBranchComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issueID int64, branchName string) error {
+ issue, err := GetIssueByID(ctx, issueID)
+ if err != nil {
+ return err
+ }
+ opts := &CreateCommentOptions{
+ Type: CommentTypeDeleteBranch,
+ Doer: doer,
+ Repo: repo,
+ Issue: issue,
+ OldRef: branchName,
+ }
+ _, err = CreateComment(ctx, opts)
+ return err
+}
+
+// UpdateIssueAttachments update attachments by UUIDs for the issue
+func UpdateIssueAttachments(ctx context.Context, issueID int64, uuids []string) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, uuids)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", uuids, err)
+ }
+ for i := 0; i < len(attachments); i++ {
+ attachments[i].IssueID = issueID
+ if err := repo_model.UpdateAttachment(ctx, attachments[i]); err != nil {
+ return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
+ }
+ }
+ return committer.Commit()
+}
+
+// ChangeIssueContent changes issue content, as the given user.
+func ChangeIssueContent(ctx context.Context, issue *Issue, doer *user_model.User, content string, contentVersion int) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ hasContentHistory, err := HasIssueContentHistory(ctx, issue.ID, 0)
+ if err != nil {
+ return fmt.Errorf("HasIssueContentHistory: %w", err)
+ }
+ if !hasContentHistory {
+ if err = SaveIssueContentHistory(ctx, issue.PosterID, issue.ID, 0,
+ issue.CreatedUnix, issue.Content, true); err != nil {
+ return fmt.Errorf("SaveIssueContentHistory: %w", err)
+ }
+ }
+
+ issue.Content = content
+ issue.ContentVersion = contentVersion + 1
+
+ expectedContentVersion := builder.NewCond().And(builder.Eq{"content_version": contentVersion})
+ affected, err := UpdateIssueColsWithCond(ctx, issue, expectedContentVersion, "content", "content_version")
+ if err != nil {
+ return fmt.Errorf("UpdateIssueCols: %w", err)
+ }
+ if affected == 0 {
+ return ErrIssueAlreadyChanged
+ }
+
+ historyDate := timeutil.TimeStampNow()
+ if issue.NoAutoTime {
+ historyDate = issue.UpdatedUnix
+ }
+ if err = SaveIssueContentHistory(ctx, doer.ID, issue.ID, 0,
+ historyDate, issue.Content, false); err != nil {
+ return fmt.Errorf("SaveIssueContentHistory: %w", err)
+ }
+
+ if err = issue.AddCrossReferences(ctx, doer, true); err != nil {
+ return fmt.Errorf("addCrossReferences: %w", err)
+ }
+
+ return committer.Commit()
+}
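ChangeIssueContent uses content_version as an optimistic lock: the update only applies when the stored version still matches the one the caller last saw. A caller-side sketch of that flow, assuming ErrIssueAlreadyChanged is a sentinel error that can be matched with errors.Is (the surrounding helper is illustrative):

package issuesketch

import (
	"context"
	"errors"
	"fmt"

	issues_model "code.gitea.io/gitea/models/issues"
	user_model "code.gitea.io/gitea/models/user"
)

// editIssueBody tries to save newContent against the content version the
// client last saw; on a version conflict it reports the conflict instead of
// overwriting the concurrent edit.
func editIssueBody(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, newContent string, seenVersion int) error {
	err := issues_model.ChangeIssueContent(ctx, issue, doer, newContent, seenVersion)
	if errors.Is(err, issues_model.ErrIssueAlreadyChanged) {
		// Someone else updated the body in the meantime; let the caller
		// reload and merge rather than silently losing their edit.
		return fmt.Errorf("issue %d was edited concurrently: %w", issue.ID, err)
	}
	return err
}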
+
+// NewIssueOptions represents the options of a new issue.
+type NewIssueOptions struct {
+ Repo *repo_model.Repository
+ Issue *Issue
+ LabelIDs []int64
+ Attachments []string // In UUID format.
+ IsPull bool
+}
+
+// NewIssueWithIndex creates issue with given index
+func NewIssueWithIndex(ctx context.Context, doer *user_model.User, opts NewIssueOptions) (err error) {
+ e := db.GetEngine(ctx)
+ opts.Issue.Title = strings.TrimSpace(opts.Issue.Title)
+
+ if opts.Issue.MilestoneID > 0 {
+ milestone, err := GetMilestoneByRepoID(ctx, opts.Issue.RepoID, opts.Issue.MilestoneID)
+ if err != nil && !IsErrMilestoneNotExist(err) {
+ return fmt.Errorf("getMilestoneByID: %w", err)
+ }
+
+ // Assume milestone is invalid and drop silently.
+ opts.Issue.MilestoneID = 0
+ if milestone != nil {
+ opts.Issue.MilestoneID = milestone.ID
+ opts.Issue.Milestone = milestone
+ }
+ }
+
+ if opts.Issue.Index <= 0 {
+ return fmt.Errorf("no issue index provided")
+ }
+ if opts.Issue.ID > 0 {
+ return fmt.Errorf("issue exist")
+ }
+
+ opts.Issue.Created = timeutil.TimeStampNanoNow()
+
+ if _, err := e.Insert(opts.Issue); err != nil {
+ return err
+ }
+
+ if opts.Issue.MilestoneID > 0 {
+ if err := UpdateMilestoneCounters(ctx, opts.Issue.MilestoneID); err != nil {
+ return err
+ }
+
+ opts := &CreateCommentOptions{
+ Type: CommentTypeMilestone,
+ Doer: doer,
+ Repo: opts.Repo,
+ Issue: opts.Issue,
+ OldMilestoneID: 0,
+ MilestoneID: opts.Issue.MilestoneID,
+ }
+ if _, err = CreateComment(ctx, opts); err != nil {
+ return err
+ }
+ }
+
+ if err := repo_model.UpdateRepoIssueNumbers(ctx, opts.Issue.RepoID, opts.IsPull, false); err != nil {
+ return err
+ }
+
+ if len(opts.LabelIDs) > 0 {
+ // Within the same session, the SQLite3 driver cannot retrieve objects after something has been updated,
+ // so we have to fetch all needed labels first.
+ labels := make([]*Label, 0, len(opts.LabelIDs))
+ if err = e.In("id", opts.LabelIDs).Find(&labels); err != nil {
+ return fmt.Errorf("find all labels [label_ids: %v]: %w", opts.LabelIDs, err)
+ }
+
+ if err = opts.Issue.LoadPoster(ctx); err != nil {
+ return err
+ }
+
+ for _, label := range labels {
+ // Silently drop invalid labels.
+ if label.RepoID != opts.Repo.ID && label.OrgID != opts.Repo.OwnerID {
+ continue
+ }
+
+ if err = newIssueLabel(ctx, opts.Issue, label, opts.Issue.Poster); err != nil {
+ return fmt.Errorf("addLabel [id: %d]: %w", label.ID, err)
+ }
+ }
+ }
+
+ if err = NewIssueUsers(ctx, opts.Repo, opts.Issue); err != nil {
+ return err
+ }
+
+ if len(opts.Attachments) > 0 {
+ attachments, err := repo_model.GetAttachmentsByUUIDs(ctx, opts.Attachments)
+ if err != nil {
+ return fmt.Errorf("getAttachmentsByUUIDs [uuids: %v]: %w", opts.Attachments, err)
+ }
+
+ for i := 0; i < len(attachments); i++ {
+ attachments[i].IssueID = opts.Issue.ID
+ if _, err = e.ID(attachments[i].ID).Update(attachments[i]); err != nil {
+ return fmt.Errorf("update attachment [id: %d]: %w", attachments[i].ID, err)
+ }
+ }
+ }
+ if err = opts.Issue.LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ return opts.Issue.AddCrossReferences(ctx, doer, false)
+}
+
+// NewIssue creates new issue with labels for repository.
+func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *Issue, labelIDs []int64, uuids []string) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", repo.ID)
+ if err != nil {
+ return fmt.Errorf("generate issue index failed: %w", err)
+ }
+
+ issue.Index = idx
+
+ if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
+ Repo: repo,
+ Issue: issue,
+ LabelIDs: labelIDs,
+ Attachments: uuids,
+ }); err != nil {
+ if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) || IsErrNewIssueInsert(err) {
+ return err
+ }
+ return fmt.Errorf("newIssue: %w", err)
+ }
+
+ if err = committer.Commit(); err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ return nil
+}
+
+// UpdateIssueMentions updates issue-user relations for mentioned users.
+func UpdateIssueMentions(ctx context.Context, issueID int64, mentions []*user_model.User) error {
+ if len(mentions) == 0 {
+ return nil
+ }
+ ids := make([]int64, len(mentions))
+ for i, u := range mentions {
+ ids[i] = u.ID
+ }
+ if err := UpdateIssueUsersByMentions(ctx, issueID, ids); err != nil {
+ return fmt.Errorf("UpdateIssueUsersByMentions: %w", err)
+ }
+ return nil
+}
+
+// UpdateIssueDeadline updates an issue deadline and adds comments. Setting a deadline to 0 means deleting it.
+func UpdateIssueDeadline(ctx context.Context, issue *Issue, deadlineUnix timeutil.TimeStamp, doer *user_model.User) (err error) {
+ // if the deadline hasn't changed do nothing
+ if issue.DeadlineUnix == deadlineUnix {
+ return nil
+ }
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ // Update the deadline
+ if err = UpdateIssueCols(ctx, &Issue{ID: issue.ID, DeadlineUnix: deadlineUnix, NoAutoTime: issue.NoAutoTime, UpdatedUnix: issue.UpdatedUnix}, "deadline_unix"); err != nil {
+ return err
+ }
+
+ // Make the comment
+ if _, err = createDeadlineComment(ctx, doer, issue, deadlineUnix); err != nil {
+ return fmt.Errorf("createRemovedDueDateComment: %w", err)
+ }
+
+ return committer.Commit()
+}
+
+// FindAndUpdateIssueMentions finds users mentioned in the given content string, and saves them in the database.
+func FindAndUpdateIssueMentions(ctx context.Context, issue *Issue, doer *user_model.User, content string) (mentions []*user_model.User, err error) {
+ rawMentions := references.FindAllMentionsMarkdown(content)
+ mentions, err = ResolveIssueMentionsByVisibility(ctx, issue, doer, rawMentions)
+ if err != nil {
+ return nil, fmt.Errorf("UpdateIssueMentions [%d]: %w", issue.ID, err)
+ }
+ if err = UpdateIssueMentions(ctx, issue.ID, mentions); err != nil {
+ return nil, fmt.Errorf("UpdateIssueMentions [%d]: %w", issue.ID, err)
+ }
+ return mentions, err
+}
+
+// ResolveIssueMentionsByVisibility returns the users mentioned in an issue, removing those that
+// don't have access to reading it. Teams are expanded into their users, but organizations are ignored.
+func ResolveIssueMentionsByVisibility(ctx context.Context, issue *Issue, doer *user_model.User, mentions []string) (users []*user_model.User, err error) {
+ if len(mentions) == 0 {
+ return nil, nil
+ }
+ if err = issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ resolved := make(map[string]bool, 10)
+ var mentionTeams []string
+
+ if err := issue.Repo.LoadOwner(ctx); err != nil {
+ return nil, err
+ }
+
+ repoOwnerIsOrg := issue.Repo.Owner.IsOrganization()
+ if repoOwnerIsOrg {
+ mentionTeams = make([]string, 0, 5)
+ }
+
+ resolved[doer.LowerName] = true
+ for _, name := range mentions {
+ name := strings.ToLower(name)
+ if _, ok := resolved[name]; ok {
+ continue
+ }
+ if repoOwnerIsOrg && strings.Contains(name, "/") {
+ names := strings.Split(name, "/")
+ if len(names) < 2 || names[0] != issue.Repo.Owner.LowerName {
+ continue
+ }
+ mentionTeams = append(mentionTeams, names[1])
+ resolved[name] = true
+ } else {
+ resolved[name] = false
+ }
+ }
+
+ if issue.Repo.Owner.IsOrganization() && len(mentionTeams) > 0 {
+ teams := make([]*organization.Team, 0, len(mentionTeams))
+ if err := db.GetEngine(ctx).
+ Join("INNER", "team_repo", "team_repo.team_id = team.id").
+ Where("team_repo.repo_id=?", issue.Repo.ID).
+ In("team.lower_name", mentionTeams).
+ Find(&teams); err != nil {
+ return nil, fmt.Errorf("find mentioned teams: %w", err)
+ }
+ if len(teams) != 0 {
+ checked := make([]int64, 0, len(teams))
+ unittype := unit.TypeIssues
+ if issue.IsPull {
+ unittype = unit.TypePullRequests
+ }
+ for _, team := range teams {
+ if team.AccessMode >= perm.AccessModeAdmin {
+ checked = append(checked, team.ID)
+ resolved[issue.Repo.Owner.LowerName+"/"+team.LowerName] = true
+ continue
+ }
+ has, err := db.GetEngine(ctx).Get(&organization.TeamUnit{OrgID: issue.Repo.Owner.ID, TeamID: team.ID, Type: unittype})
+ if err != nil {
+ return nil, fmt.Errorf("get team units (%d): %w", team.ID, err)
+ }
+ if has {
+ checked = append(checked, team.ID)
+ resolved[issue.Repo.Owner.LowerName+"/"+team.LowerName] = true
+ }
+ }
+ if len(checked) != 0 {
+ teamusers := make([]*user_model.User, 0, 20)
+ if err := db.GetEngine(ctx).
+ Join("INNER", "team_user", "team_user.uid = `user`.id").
+ Join("LEFT", "forgejo_blocked_user", "forgejo_blocked_user.user_id = `user`.id").
+ In("`team_user`.team_id", checked).
+ And("`user`.is_active = ?", true).
+ And("`user`.prohibit_login = ?", false).
+ And(builder.Or(builder.IsNull{"`forgejo_blocked_user`.block_id"}, builder.Neq{"`forgejo_blocked_user`.block_id": doer.ID})).
+ Find(&teamusers); err != nil {
+ return nil, fmt.Errorf("get teams users: %w", err)
+ }
+ if len(teamusers) > 0 {
+ users = make([]*user_model.User, 0, len(teamusers))
+ for _, user := range teamusers {
+ if already, ok := resolved[user.LowerName]; !ok || !already {
+ users = append(users, user)
+ resolved[user.LowerName] = true
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Keep only the names that are still unresolved; if none remain, we can skip querying the database
+ mentionUsers := make([]string, 0, len(resolved))
+ for name, already := range resolved {
+ if !already {
+ mentionUsers = append(mentionUsers, name)
+ }
+ }
+ if len(mentionUsers) == 0 {
+ return users, err
+ }
+
+ if users == nil {
+ users = make([]*user_model.User, 0, len(mentionUsers))
+ }
+
+ unchecked := make([]*user_model.User, 0, len(mentionUsers))
+ if err := db.GetEngine(ctx).
+ Join("LEFT", "forgejo_blocked_user", "forgejo_blocked_user.user_id = `user`.id").
+ Where("`user`.is_active = ?", true).
+ And("`user`.prohibit_login = ?", false).
+ And(builder.Or(builder.IsNull{"`forgejo_blocked_user`.block_id"}, builder.Neq{"`forgejo_blocked_user`.block_id": doer.ID})).
+ In("`user`.lower_name", mentionUsers).
+ Find(&unchecked); err != nil {
+ return nil, fmt.Errorf("find mentioned users: %w", err)
+ }
+ for _, user := range unchecked {
+ if already := resolved[user.LowerName]; already || user.IsOrganization() {
+ continue
+ }
+ // Normal users must have read access to the referencing issue
+ perm, err := access_model.GetUserRepoPermission(ctx, issue.Repo, user)
+ if err != nil {
+ return nil, fmt.Errorf("GetUserRepoPermission [%d]: %w", user.ID, err)
+ }
+ if !perm.CanReadIssuesOrPulls(issue.IsPull) {
+ continue
+ }
+ users = append(users, user)
+ }
+
+ return users, err
+}
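Much of the function above is classifying raw mention strings before any permission checks. A simplified standalone sketch of just that split, with the dedup and permission logic omitted (splitMentions and ownerName are illustrative stand-ins):

package main

import (
	"fmt"
	"strings"
)

// splitMentions separates "<owner>/<team>" mentions of the repo owner from
// plain user-name mentions, roughly as ResolveIssueMentionsByVisibility does
// before resolving them against the database.
func splitMentions(ownerName string, mentions []string) (users, teams []string) {
	for _, name := range mentions {
		name = strings.ToLower(name)
		if owner, team, ok := strings.Cut(name, "/"); ok {
			if owner == ownerName && team != "" {
				teams = append(teams, team)
			}
			continue
		}
		users = append(users, name)
	}
	return users, teams
}

func main() {
	users, teams := splitMentions("org17", []string{"User5", "org17/owners", "other/team"})
	fmt.Println(users, teams) // prints: [user5] [owners]
}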
+
+// UpdateIssuesMigrationsByType updates all issues in repositories migrated from gitServiceType, replacing originalAuthorID with posterID
+func UpdateIssuesMigrationsByType(ctx context.Context, gitServiceType api.GitServiceType, originalAuthorID string, posterID int64) error {
+ _, err := db.GetEngine(ctx).Table("issue").
+ Where("repo_id IN (SELECT id FROM repository WHERE original_service_type = ?)", gitServiceType).
+ And("original_author_id = ?", originalAuthorID).
+ Update(map[string]any{
+ "poster_id": posterID,
+ "original_author": "",
+ "original_author_id": 0,
+ })
+ return err
+}
+
+// UpdateReactionsMigrationsByType updates all reactions in repositories migrated from gitServiceType, replacing originalAuthorID with userID
+func UpdateReactionsMigrationsByType(ctx context.Context, gitServiceType api.GitServiceType, originalAuthorID string, userID int64) error {
+ _, err := db.GetEngine(ctx).Table("reaction").
+ Where("original_author_id = ?", originalAuthorID).
+ And(migratedIssueCond(gitServiceType)).
+ Update(map[string]any{
+ "user_id": userID,
+ "original_author": "",
+ "original_author_id": 0,
+ })
+ return err
+}
+
+// DeleteIssuesByRepoID deletes all issues of the repository with the given ID and returns the attachment paths that need to be removed
+func DeleteIssuesByRepoID(ctx context.Context, repoID int64) (attachmentPaths []string, err error) {
+ // MariaDB has a performance bug: https://jira.mariadb.org/browse/MDEV-16289
+ // so here it uses "DELETE ... WHERE IN" with pre-queried IDs.
+ sess := db.GetEngine(ctx)
+
+ for {
+ issueIDs := make([]int64, 0, db.DefaultMaxInSize)
+
+ err := sess.Table(&Issue{}).Where("repo_id = ?", repoID).OrderBy("id").Limit(db.DefaultMaxInSize).Cols("id").Find(&issueIDs)
+ if err != nil {
+ return nil, err
+ }
+
+ if len(issueIDs) == 0 {
+ break
+ }
+
+ // Delete content histories
+ _, err = sess.In("issue_id", issueIDs).Delete(&ContentHistory{})
+ if err != nil {
+ return nil, err
+ }
+
+		// Delete comments; attachments are handled further below
+ _, err = sess.In("issue_id", issueIDs).Delete(&Comment{})
+ if err != nil {
+ return nil, err
+ }
+
+		// Delete dependencies for issues in this repository
+ _, err = sess.In("issue_id", issueIDs).Delete(&IssueDependency{})
+ if err != nil {
+ return nil, err
+ }
+
+ // Delete dependencies for issues in other repositories
+ _, err = sess.In("dependency_id", issueIDs).Delete(&IssueDependency{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&IssueUser{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&Reaction{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&IssueWatch{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&Stopwatch{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&TrackedTime{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&project_model.ProjectIssue{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("dependent_issue_id", issueIDs).Delete(&Comment{})
+ if err != nil {
+ return nil, err
+ }
+
+ var attachments []*repo_model.Attachment
+ err = sess.In("issue_id", issueIDs).Find(&attachments)
+ if err != nil {
+ return nil, err
+ }
+
+ for j := range attachments {
+ attachmentPaths = append(attachmentPaths, attachments[j].RelativePath())
+ }
+
+ _, err = sess.In("issue_id", issueIDs).Delete(&repo_model.Attachment{})
+ if err != nil {
+ return nil, err
+ }
+
+ _, err = sess.In("id", issueIDs).Delete(&Issue{})
+ if err != nil {
+ return nil, err
+ }
+ }
+
+ return attachmentPaths, err
+}
+
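+// Note (illustrative sketch, values hypothetical): the returned attachment paths
+// still point at files in storage; callers are expected to remove them afterwards,
+// as DeleteOrphanedIssues does below.
+//
+//	paths, _ := DeleteIssuesByRepoID(ctx, repo.ID)
+//	for _, p := range paths {
+//		system_model.RemoveAllWithNotice(ctx, "Delete issue attachment", p)
+//	}
+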
+// DeleteOrphanedIssues deletes issues whose repository no longer exists
+func DeleteOrphanedIssues(ctx context.Context) error {
+ var attachmentPaths []string
+ err := db.WithTx(ctx, func(ctx context.Context) error {
+ var ids []int64
+
+ if err := db.GetEngine(ctx).Table("issue").Distinct("issue.repo_id").
+ Join("LEFT", "repository", "issue.repo_id=repository.id").
+ Where(builder.IsNull{"repository.id"}).GroupBy("issue.repo_id").
+ Find(&ids); err != nil {
+ return err
+ }
+
+ for i := range ids {
+ paths, err := DeleteIssuesByRepoID(ctx, ids[i])
+ if err != nil {
+ return err
+ }
+ attachmentPaths = append(attachmentPaths, paths...)
+ }
+
+ return nil
+ })
+ if err != nil {
+ return err
+ }
+
+ // Remove issue attachment files.
+ for i := range attachmentPaths {
+ system_model.RemoveAllWithNotice(ctx, "Delete issue attachment", attachmentPaths[i])
+ }
+ return nil
+}
diff --git a/models/issues/issue_user.go b/models/issues/issue_user.go
new file mode 100644
index 0000000..6b59e07
--- /dev/null
+++ b/models/issues/issue_user.go
@@ -0,0 +1,96 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+)
+
+// IssueUser represents an issue-user relation.
+type IssueUser struct {
+ ID int64 `xorm:"pk autoincr"`
+ UID int64 `xorm:"INDEX unique(uid_to_issue)"` // User ID.
+ IssueID int64 `xorm:"INDEX unique(uid_to_issue)"`
+ IsRead bool
+ IsMentioned bool
+}
+
+func init() {
+ db.RegisterModel(new(IssueUser))
+}
+
+// NewIssueUsers inserts the issue-user relations for a newly created issue (assignees and poster)
+func NewIssueUsers(ctx context.Context, repo *repo_model.Repository, issue *Issue) error {
+ assignees, err := repo_model.GetRepoAssignees(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("getAssignees: %w", err)
+ }
+
+	// The poster can be anyone; append them later if they are not among the assignees.
+ isPosterAssignee := false
+
+	// Reserve one extra slot for the poster to be appended later; if the poster
+	// turns out to be an assignee, wasting one slot is cheaper than reallocating.
+ issueUsers := make([]*IssueUser, 0, len(assignees)+1)
+ for _, assignee := range assignees {
+ issueUsers = append(issueUsers, &IssueUser{
+ IssueID: issue.ID,
+ UID: assignee.ID,
+ })
+ isPosterAssignee = isPosterAssignee || assignee.ID == issue.PosterID
+ }
+ if !isPosterAssignee {
+ issueUsers = append(issueUsers, &IssueUser{
+ IssueID: issue.ID,
+ UID: issue.PosterID,
+ })
+ }
+
+ return db.Insert(ctx, issueUsers)
+}
+
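+// Illustrative sketch (fixture values are hypothetical): for a repository whose
+// assignable users are {2, 3} and an issue posted by user 4, NewIssueUsers
+// inserts issue_user rows for UIDs 2, 3 and 4; if the poster were already an
+// assignee, no extra row would be added.
+//
+//	err := NewIssueUsers(ctx, repo, issue) // issue.PosterID == 4
+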
+// UpdateIssueUserByRead updates issue-user relation for reading.
+func UpdateIssueUserByRead(ctx context.Context, uid, issueID int64) error {
+ _, err := db.GetEngine(ctx).Exec("UPDATE `issue_user` SET is_read=? WHERE uid=? AND issue_id=?", true, uid, issueID)
+ return err
+}
+
+// UpdateIssueUsersByMentions updates issue-user pairs by mentioning.
+func UpdateIssueUsersByMentions(ctx context.Context, issueID int64, uids []int64) error {
+ for _, uid := range uids {
+ iu := &IssueUser{
+ UID: uid,
+ IssueID: issueID,
+ }
+ has, err := db.GetEngine(ctx).Get(iu)
+ if err != nil {
+ return err
+ }
+
+ iu.IsMentioned = true
+ if has {
+ _, err = db.GetEngine(ctx).ID(iu.ID).Cols("is_mentioned").Update(iu)
+ } else {
+ _, err = db.GetEngine(ctx).Insert(iu)
+ }
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// GetIssueMentionIDs returns all mentioned user IDs of an issue.
+func GetIssueMentionIDs(ctx context.Context, issueID int64) ([]int64, error) {
+ var ids []int64
+ return ids, db.GetEngine(ctx).Table(IssueUser{}).
+ Where("issue_id=?", issueID).
+ And("is_mentioned=?", true).
+ Select("uid").
+ Find(&ids)
+}
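+
+// Mention flow sketch (IDs are hypothetical): once mentions are resolved to user
+// IDs, UpdateIssueUsersByMentions upserts the rows and GetIssueMentionIDs reads
+// them back.
+//
+//	_ = UpdateIssueUsersByMentions(ctx, issue.ID, []int64{2, 5})
+//	ids, _ := GetIssueMentionIDs(ctx, issue.ID) // -> [2, 5]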
diff --git a/models/issues/issue_user_test.go b/models/issues/issue_user_test.go
new file mode 100644
index 0000000..e059e43
--- /dev/null
+++ b/models/issues/issue_user_test.go
@@ -0,0 +1,61 @@
+// Copyright 2017 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_NewIssueUsers(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ newIssue := &issues_model.Issue{
+ RepoID: repo.ID,
+ PosterID: 4,
+ Index: 6,
+ Title: "newTestIssueTitle",
+ Content: "newTestIssueContent",
+ }
+
+ // artificially insert new issue
+ unittest.AssertSuccessfulInsert(t, newIssue)
+
+ require.NoError(t, issues_model.NewIssueUsers(db.DefaultContext, repo, newIssue))
+
+ // issue_user table should now have entries for new issue
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: newIssue.ID, UID: newIssue.PosterID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: newIssue.ID, UID: repo.OwnerID})
+}
+
+func TestUpdateIssueUserByRead(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+
+ require.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, 4, issue.ID))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: issue.ID, UID: 4}, "is_read=1")
+
+ require.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, 4, issue.ID))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: issue.ID, UID: 4}, "is_read=1")
+
+ require.NoError(t, issues_model.UpdateIssueUserByRead(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID))
+}
+
+func TestUpdateIssueUsersByMentions(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+
+ uids := []int64{2, 5}
+ require.NoError(t, issues_model.UpdateIssueUsersByMentions(db.DefaultContext, issue.ID, uids))
+ for _, uid := range uids {
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueUser{IssueID: issue.ID, UID: uid}, "is_mentioned=1")
+ }
+}
diff --git a/models/issues/issue_watch.go b/models/issues/issue_watch.go
new file mode 100644
index 0000000..9e616a0
--- /dev/null
+++ b/models/issues/issue_watch.go
@@ -0,0 +1,134 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+)
+
+// IssueWatch represents a user's subscription (or explicit unsubscription) to notifications for an issue.
+type IssueWatch struct {
+ ID int64 `xorm:"pk autoincr"`
+ UserID int64 `xorm:"UNIQUE(watch) NOT NULL"`
+ IssueID int64 `xorm:"UNIQUE(watch) NOT NULL"`
+ IsWatching bool `xorm:"NOT NULL"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"updated NOT NULL"`
+}
+
+func init() {
+ db.RegisterModel(new(IssueWatch))
+}
+
+// IssueWatchList contains IssueWatch
+type IssueWatchList []*IssueWatch
+
+// CreateOrUpdateIssueWatch sets the watching state for a user and issue
+func CreateOrUpdateIssueWatch(ctx context.Context, userID, issueID int64, isWatching bool) error {
+ iw, exists, err := GetIssueWatch(ctx, userID, issueID)
+ if err != nil {
+ return err
+ }
+
+ if !exists {
+ iw = &IssueWatch{
+ UserID: userID,
+ IssueID: issueID,
+ IsWatching: isWatching,
+ }
+
+ if _, err := db.GetEngine(ctx).Insert(iw); err != nil {
+ return err
+ }
+ } else {
+ iw.IsWatching = isWatching
+
+ if _, err := db.GetEngine(ctx).ID(iw.ID).Cols("is_watching", "updated_unix").Update(iw); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
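+// Illustrative usage (IDs are hypothetical): the first call for a (user, issue)
+// pair inserts a row; later calls only update is_watching and updated_unix.
+//
+//	_ = CreateOrUpdateIssueWatch(ctx, 3, 1, true)  // user 3 subscribes to issue 1
+//	_ = CreateOrUpdateIssueWatch(ctx, 3, 1, false) // flips the existing row to "not watching"
+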
+// GetIssueWatch returns the IssueWatch object for the given user and issue, if any.
+// The current web UI needs the object for watchers AND explicit non-watchers.
+func GetIssueWatch(ctx context.Context, userID, issueID int64) (iw *IssueWatch, exists bool, err error) {
+ iw = new(IssueWatch)
+ exists, err = db.GetEngine(ctx).
+ Where("user_id = ?", userID).
+ And("issue_id = ?", issueID).
+ Get(iw)
+ return iw, exists, err
+}
+
+// CheckIssueWatch checks if a user is watching an issue;
+// it takes issue participation and repository watches into account.
+func CheckIssueWatch(ctx context.Context, user *user_model.User, issue *Issue) (bool, error) {
+ iw, exist, err := GetIssueWatch(ctx, user.ID, issue.ID)
+ if err != nil {
+ return false, err
+ }
+ if exist {
+ return iw.IsWatching, nil
+ }
+ w, err := repo_model.GetWatch(ctx, user.ID, issue.RepoID)
+ if err != nil {
+ return false, err
+ }
+ return repo_model.IsWatchMode(w.Mode) || IsUserParticipantsOfIssue(ctx, user, issue), nil
+}
+
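+// Resolution order sketch (user and issue are placeholders): an explicit
+// IssueWatch row always wins; otherwise the user counts as watching when they
+// watch the repository in a watch mode or participate in the issue.
+//
+//	watching, err := CheckIssueWatch(ctx, user, issue)
+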
+// GetIssueWatchersIDs returns the IDs of subscribers or explicit unsubscribers of a given issue,
+// but avoids joining with `user` for performance reasons.
+// User permissions must be verified elsewhere if required.
+func GetIssueWatchersIDs(ctx context.Context, issueID int64, watching bool) ([]int64, error) {
+ ids := make([]int64, 0, 64)
+ return ids, db.GetEngine(ctx).Table("issue_watch").
+ Where("issue_id=?", issueID).
+ And("is_watching = ?", watching).
+ Select("user_id").
+ Find(&ids)
+}
+
+// GetIssueWatchers returns the active watchers of a given issue
+func GetIssueWatchers(ctx context.Context, issueID int64, listOptions db.ListOptions) (IssueWatchList, error) {
+ sess := db.GetEngine(ctx).
+ Where("`issue_watch`.issue_id = ?", issueID).
+ And("`issue_watch`.is_watching = ?", true).
+ And("`user`.is_active = ?", true).
+ And("`user`.prohibit_login = ?", false).
+ Join("INNER", "`user`", "`user`.id = `issue_watch`.user_id")
+
+ if listOptions.Page != 0 {
+ sess = db.SetSessionPagination(sess, &listOptions)
+ watches := make([]*IssueWatch, 0, listOptions.PageSize)
+ return watches, sess.Find(&watches)
+ }
+ watches := make([]*IssueWatch, 0, 8)
+ return watches, sess.Find(&watches)
+}
+
+// CountIssueWatchers counts the active watchers of a given issue
+func CountIssueWatchers(ctx context.Context, issueID int64) (int64, error) {
+ return db.GetEngine(ctx).
+ Where("`issue_watch`.issue_id = ?", issueID).
+ And("`issue_watch`.is_watching = ?", true).
+ And("`user`.is_active = ?", true).
+ And("`user`.prohibit_login = ?", false).
+ Join("INNER", "`user`", "`user`.id = `issue_watch`.user_id").Count(new(IssueWatch))
+}
+
+// RemoveIssueWatchersByRepoID removes all issue watches of a user within the given repository
+func RemoveIssueWatchersByRepoID(ctx context.Context, userID, repoID int64) error {
+ _, err := db.GetEngine(ctx).
+ Join("INNER", "issue", "`issue`.id = `issue_watch`.issue_id AND `issue`.repo_id = ?", repoID).
+ Where("`issue_watch`.user_id = ?", userID).
+ Delete(new(IssueWatch))
+ return err
+}
diff --git a/models/issues/issue_watch_test.go b/models/issues/issue_watch_test.go
new file mode 100644
index 0000000..573215d
--- /dev/null
+++ b/models/issues/issue_watch_test.go
@@ -0,0 +1,68 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateOrUpdateIssueWatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ require.NoError(t, issues_model.CreateOrUpdateIssueWatch(db.DefaultContext, 3, 1, true))
+ iw := unittest.AssertExistsAndLoadBean(t, &issues_model.IssueWatch{UserID: 3, IssueID: 1})
+ assert.True(t, iw.IsWatching)
+
+ require.NoError(t, issues_model.CreateOrUpdateIssueWatch(db.DefaultContext, 1, 1, false))
+ iw = unittest.AssertExistsAndLoadBean(t, &issues_model.IssueWatch{UserID: 1, IssueID: 1})
+ assert.False(t, iw.IsWatching)
+}
+
+func TestGetIssueWatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ _, exists, err := issues_model.GetIssueWatch(db.DefaultContext, 9, 1)
+ assert.True(t, exists)
+ require.NoError(t, err)
+
+ iw, exists, err := issues_model.GetIssueWatch(db.DefaultContext, 2, 2)
+ assert.True(t, exists)
+ require.NoError(t, err)
+ assert.False(t, iw.IsWatching)
+
+ _, exists, err = issues_model.GetIssueWatch(db.DefaultContext, 3, 1)
+ assert.False(t, exists)
+ require.NoError(t, err)
+}
+
+func TestGetIssueWatchers(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ iws, err := issues_model.GetIssueWatchers(db.DefaultContext, 1, db.ListOptions{})
+ require.NoError(t, err)
+ // Watcher is inactive, thus 0
+ assert.Empty(t, iws)
+
+ iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 2, db.ListOptions{})
+ require.NoError(t, err)
+	// Watcher is explicitly not watching
+ assert.Empty(t, iws)
+
+ iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 5, db.ListOptions{})
+ require.NoError(t, err)
+ // Issue has no Watchers
+ assert.Empty(t, iws)
+
+ iws, err = issues_model.GetIssueWatchers(db.DefaultContext, 7, db.ListOptions{})
+ require.NoError(t, err)
+ // Issue has one watcher
+ assert.Len(t, iws, 1)
+}
diff --git a/models/issues/issue_xref.go b/models/issues/issue_xref.go
new file mode 100644
index 0000000..9c9d5d6
--- /dev/null
+++ b/models/issues/issue_xref.go
@@ -0,0 +1,364 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/references"
+)
+
+type crossReference struct {
+ Issue *Issue
+ Action references.XRefAction
+}
+
+// crossReferencesContext is the context passed along to the cross-reference helpers
+type crossReferencesContext struct {
+ Type CommentType
+ Doer *user_model.User
+ OrigIssue *Issue
+ OrigComment *Comment
+ RemoveOld bool
+}
+
+func findOldCrossReferences(ctx context.Context, issueID, commentID int64) ([]*Comment, error) {
+ active := make([]*Comment, 0, 10)
+ return active, db.GetEngine(ctx).Where("`ref_action` IN (?, ?, ?)", references.XRefActionNone, references.XRefActionCloses, references.XRefActionReopens).
+ And("`ref_issue_id` = ?", issueID).
+ And("`ref_comment_id` = ?", commentID).
+ Find(&active)
+}
+
+func neuterCrossReferences(ctx context.Context, issueID, commentID int64) error {
+ active, err := findOldCrossReferences(ctx, issueID, commentID)
+ if err != nil {
+ return err
+ }
+ ids := make([]int64, len(active))
+ for i, c := range active {
+ ids[i] = c.ID
+ }
+ return neuterCrossReferencesIDs(ctx, nil, ids)
+}
+
+func neuterCrossReferencesIDs(stdCtx context.Context, ctx *crossReferencesContext, ids []int64) error {
+ sess := db.GetEngine(stdCtx).In("id", ids).Cols("`ref_action`")
+ if ctx != nil && ctx.OrigIssue.NoAutoTime {
+ sess.SetExpr("updated_unix", ctx.OrigIssue.UpdatedUnix).NoAutoTime()
+ }
+ _, err := sess.Update(&Comment{RefAction: references.XRefActionNeutered})
+ return err
+}
+
+// AddCrossReferences adds the cross-references found in the issue title and content, possibly pointing at other repositories.
+func (issue *Issue) AddCrossReferences(stdCtx context.Context, doer *user_model.User, removeOld bool) error {
+ var commentType CommentType
+ if issue.IsPull {
+ commentType = CommentTypePullRef
+ } else {
+ commentType = CommentTypeIssueRef
+ }
+ ctx := &crossReferencesContext{
+ Type: commentType,
+ Doer: doer,
+ OrigIssue: issue,
+ RemoveOld: removeOld,
+ }
+ return issue.createCrossReferences(stdCtx, ctx, issue.Title, issue.Content)
+}
+
+func (issue *Issue) createCrossReferences(stdCtx context.Context, ctx *crossReferencesContext, plaincontent, mdcontent string) error {
+ xreflist, err := ctx.OrigIssue.getCrossReferences(stdCtx, ctx, plaincontent, mdcontent)
+ if err != nil {
+ return err
+ }
+ if ctx.RemoveOld {
+ var commentID int64
+ if ctx.OrigComment != nil {
+ commentID = ctx.OrigComment.ID
+ }
+ active, err := findOldCrossReferences(stdCtx, ctx.OrigIssue.ID, commentID)
+ if err != nil {
+ return err
+ }
+ ids := make([]int64, 0, len(active))
+ for _, c := range active {
+ found := false
+ for i, x := range xreflist {
+ if x.Issue.ID == c.IssueID && x.Action == c.RefAction {
+ found = true
+ xreflist = append(xreflist[:i], xreflist[i+1:]...)
+ break
+ }
+ }
+ if !found {
+ ids = append(ids, c.ID)
+ }
+ }
+ if len(ids) > 0 {
+ if err = neuterCrossReferencesIDs(stdCtx, ctx, ids); err != nil {
+ return err
+ }
+ }
+ }
+ for _, xref := range xreflist {
+ var refCommentID int64
+ if ctx.OrigComment != nil {
+ refCommentID = ctx.OrigComment.ID
+ }
+ if ctx.OrigIssue.NoAutoTime {
+ xref.Issue.NoAutoTime = true
+ xref.Issue.UpdatedUnix = ctx.OrigIssue.UpdatedUnix
+ }
+ opts := &CreateCommentOptions{
+ Type: ctx.Type,
+ Doer: ctx.Doer,
+ Repo: xref.Issue.Repo,
+ Issue: xref.Issue,
+ RefRepoID: ctx.OrigIssue.RepoID,
+ RefIssueID: ctx.OrigIssue.ID,
+ RefCommentID: refCommentID,
+ RefAction: xref.Action,
+ RefIsPull: ctx.OrigIssue.IsPull,
+ }
+ _, err := CreateComment(stdCtx, opts)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (issue *Issue) getCrossReferences(stdCtx context.Context, ctx *crossReferencesContext, plaincontent, mdcontent string) ([]*crossReference, error) {
+ xreflist := make([]*crossReference, 0, 5)
+ var (
+ refRepo *repo_model.Repository
+ refIssue *Issue
+ refAction references.XRefAction
+ err error
+ )
+
+ allrefs := append(references.FindAllIssueReferences(plaincontent), references.FindAllIssueReferencesMarkdown(mdcontent)...)
+ for _, ref := range allrefs {
+ if ref.Owner == "" && ref.Name == "" {
+ // Issues in the same repository
+ if err := ctx.OrigIssue.LoadRepo(stdCtx); err != nil {
+ return nil, err
+ }
+ refRepo = ctx.OrigIssue.Repo
+ } else {
+ // Issues in other repositories
+ refRepo, err = repo_model.GetRepositoryByOwnerAndName(stdCtx, ref.Owner, ref.Name)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+ }
+ if refIssue, refAction, err = ctx.OrigIssue.verifyReferencedIssue(stdCtx, ctx, refRepo, ref); err != nil {
+ return nil, err
+ }
+ if refIssue != nil {
+ xreflist = ctx.OrigIssue.updateCrossReferenceList(xreflist, &crossReference{
+ Issue: refIssue,
+ Action: refAction,
+ })
+ }
+ }
+
+ return xreflist, nil
+}
+
+func (issue *Issue) updateCrossReferenceList(list []*crossReference, xref *crossReference) []*crossReference {
+ if xref.Issue.ID == issue.ID {
+ return list
+ }
+ for i, r := range list {
+ if r.Issue.ID == xref.Issue.ID {
+ if xref.Action != references.XRefActionNone {
+ list[i].Action = xref.Action
+ }
+ return list
+ }
+ }
+ return append(list, xref)
+}
+
+// verifyReferencedIssue checks whether the referenced issue exists and whether the doer has permission to perform the referenced action
+func (issue *Issue) verifyReferencedIssue(stdCtx context.Context, ctx *crossReferencesContext, repo *repo_model.Repository,
+ ref references.IssueReference,
+) (*Issue, references.XRefAction, error) {
+ refIssue := &Issue{RepoID: repo.ID, Index: ref.Index}
+ refAction := ref.Action
+ e := db.GetEngine(stdCtx)
+
+ if has, _ := e.Get(refIssue); !has {
+ return nil, references.XRefActionNone, nil
+ }
+ if err := refIssue.LoadRepo(stdCtx); err != nil {
+ return nil, references.XRefActionNone, err
+ }
+
+ // Close/reopen actions can only be set from pull requests to issues
+ if refIssue.IsPull || !issue.IsPull {
+ refAction = references.XRefActionNone
+ }
+
+ // Check doer permissions; set action to None if the doer can't change the destination
+ if refIssue.RepoID != ctx.OrigIssue.RepoID || ref.Action != references.XRefActionNone {
+ perm, err := access_model.GetUserRepoPermission(stdCtx, refIssue.Repo, ctx.Doer)
+ if err != nil {
+ return nil, references.XRefActionNone, err
+ }
+ if !perm.CanReadIssuesOrPulls(refIssue.IsPull) {
+ return nil, references.XRefActionNone, nil
+ }
+ // Accept close/reopening actions only if the poster is able to close the
+ // referenced issue manually at this moment. The only exception is
+ // the poster of a new PR referencing an issue on the same repo: then the merger
+ // should be responsible for checking whether the reference should resolve.
+ if ref.Action != references.XRefActionNone &&
+ ctx.Doer.ID != refIssue.PosterID &&
+ !perm.CanWriteIssuesOrPulls(refIssue.IsPull) &&
+ (refIssue.RepoID != ctx.OrigIssue.RepoID || ctx.OrigComment != nil) {
+ refAction = references.XRefActionNone
+ }
+ }
+
+ return refIssue, refAction, nil
+}
+
+// AddCrossReferences adds the cross-references found in the comment content
+func (c *Comment) AddCrossReferences(stdCtx context.Context, doer *user_model.User, removeOld bool) error {
+ if c.Type != CommentTypeCode && c.Type != CommentTypeComment {
+ return nil
+ }
+ if err := c.LoadIssue(stdCtx); err != nil {
+ return err
+ }
+ ctx := &crossReferencesContext{
+ Type: CommentTypeCommentRef,
+ Doer: doer,
+ OrigIssue: c.Issue,
+ OrigComment: c,
+ RemoveOld: removeOld,
+ }
+ return c.Issue.createCrossReferences(stdCtx, ctx, "", c.Content)
+}
+
+func (c *Comment) neuterCrossReferences(ctx context.Context) error {
+ return neuterCrossReferences(ctx, c.IssueID, c.ID)
+}
+
+// LoadRefComment loads the comment that created this reference from the database
+func (c *Comment) LoadRefComment(ctx context.Context) (err error) {
+ if c.RefComment != nil {
+ return nil
+ }
+ c.RefComment, err = GetCommentByID(ctx, c.RefCommentID)
+ return err
+}
+
+// LoadRefIssue loads the issue that created this reference from the database
+func (c *Comment) LoadRefIssue(ctx context.Context) (err error) {
+ if c.RefIssue != nil {
+ return nil
+ }
+ c.RefIssue, err = GetIssueByID(ctx, c.RefIssueID)
+ if err == nil {
+ err = c.RefIssue.LoadRepo(ctx)
+ }
+ return err
+}
+
+// CommentTypeIsRef returns true if CommentType is a reference from another issue
+func CommentTypeIsRef(t CommentType) bool {
+ return t == CommentTypeCommentRef || t == CommentTypePullRef || t == CommentTypeIssueRef
+}
+
+// RefCommentLink returns the relative URL for the comment that created this reference
+func (c *Comment) RefCommentLink(ctx context.Context) string {
+ // Edge case for when the reference is inside the title or the description of the referring issue
+ if c.RefCommentID == 0 {
+ return c.RefIssueLink(ctx)
+ }
+ if err := c.LoadRefComment(ctx); err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadRefComment(%d): %v", c.RefCommentID, err)
+ return ""
+ }
+ return c.RefComment.Link(ctx)
+}
+
+// RefIssueLink returns the relative URL of the issue where this reference was created
+func (c *Comment) RefIssueLink(ctx context.Context) string {
+ if err := c.LoadRefIssue(ctx); err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadRefIssue(%d): %v", c.RefCommentID, err)
+ return ""
+ }
+ return c.RefIssue.Link()
+}
+
+// RefIssueTitle returns the title of the issue where this reference was created
+func (c *Comment) RefIssueTitle(ctx context.Context) string {
+ if err := c.LoadRefIssue(ctx); err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadRefIssue(%d): %v", c.RefCommentID, err)
+ return ""
+ }
+ return c.RefIssue.Title
+}
+
+// RefIssueIdent returns the user-friendly identifier (e.g. "#1234") of the issue where this reference was created
+func (c *Comment) RefIssueIdent(ctx context.Context) string {
+ if err := c.LoadRefIssue(ctx); err != nil { // Silently dropping errors :unamused:
+ log.Error("LoadRefIssue(%d): %v", c.RefCommentID, err)
+ return ""
+ }
+ // FIXME: check this name for cross-repository references (#7901 if it gets merged)
+ return fmt.Sprintf("#%d", c.RefIssue.Index)
+}
+
+// __________ .__ .__ __________ __
+// \______ \__ __| | | |\______ \ ____ ________ __ ____ _______/ |_
+// | ___/ | \ | | | | _// __ \/ ____/ | \_/ __ \ / ___/\ __\
+// | | | | / |_| |_| | \ ___< <_| | | /\ ___/ \___ \ | |
+// |____| |____/|____/____/____|_ /\___ >__ |____/ \___ >____ > |__|
+// \/ \/ |__| \/ \/
+
+// ResolveCrossReferences will return the list of references to close/reopen by this PR
+func (pr *PullRequest) ResolveCrossReferences(ctx context.Context) ([]*Comment, error) {
+ unfiltered := make([]*Comment, 0, 5)
+ if err := db.GetEngine(ctx).
+ Where("ref_repo_id = ? AND ref_issue_id = ?", pr.Issue.RepoID, pr.Issue.ID).
+ In("ref_action", []references.XRefAction{references.XRefActionCloses, references.XRefActionReopens}).
+ OrderBy("id").
+ Find(&unfiltered); err != nil {
+ return nil, fmt.Errorf("get reference: %w", err)
+ }
+
+ refs := make([]*Comment, 0, len(unfiltered))
+ for _, ref := range unfiltered {
+ found := false
+ for i, r := range refs {
+ if r.IssueID == ref.IssueID {
+ // Keep only the latest
+ refs[i] = ref
+ found = true
+ break
+ }
+ }
+ if !found {
+ refs = append(refs, ref)
+ }
+ }
+
+ return refs, nil
+}
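+
+// Dedup sketch: because the query is ordered by comment id, a later "closes #3"
+// supersedes an earlier "reopens #3" for the same referenced issue, so at most
+// one comment per referenced issue is returned.
+//
+//	refs, _ := pr.ResolveCrossReferences(ctx) // one entry per referenced issue, latest action wins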
diff --git a/models/issues/issue_xref_test.go b/models/issues/issue_xref_test.go
new file mode 100644
index 0000000..a24d1b0
--- /dev/null
+++ b/models/issues/issue_xref_test.go
@@ -0,0 +1,185 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "fmt"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/references"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestXRef_AddCrossReferences(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Issue #1 to test against
+ itarget := testCreateIssue(t, 1, 2, "title1", "content1", false)
+
+ // PR to close issue #1
+ content := fmt.Sprintf("content2, closes #%d", itarget.Index)
+ pr := testCreateIssue(t, 1, 2, "title2", content, true)
+ ref := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: 0})
+ assert.Equal(t, issues_model.CommentTypePullRef, ref.Type)
+ assert.Equal(t, pr.RepoID, ref.RefRepoID)
+ assert.True(t, ref.RefIsPull)
+ assert.Equal(t, references.XRefActionCloses, ref.RefAction)
+
+ // Comment on PR to reopen issue #1
+ content = fmt.Sprintf("content2, reopens #%d", itarget.Index)
+ c := testCreateComment(t, 2, pr.ID, content)
+ ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: pr.ID, RefCommentID: c.ID})
+ assert.Equal(t, issues_model.CommentTypeCommentRef, ref.Type)
+ assert.Equal(t, pr.RepoID, ref.RefRepoID)
+ assert.True(t, ref.RefIsPull)
+ assert.Equal(t, references.XRefActionReopens, ref.RefAction)
+
+ // Issue mentioning issue #1
+ content = fmt.Sprintf("content3, mentions #%d", itarget.Index)
+ i := testCreateIssue(t, 1, 2, "title3", content, false)
+ ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
+ assert.Equal(t, issues_model.CommentTypeIssueRef, ref.Type)
+ assert.Equal(t, pr.RepoID, ref.RefRepoID)
+ assert.False(t, ref.RefIsPull)
+ assert.Equal(t, references.XRefActionNone, ref.RefAction)
+
+ // Issue #4 to test against
+ itarget = testCreateIssue(t, 3, 3, "title4", "content4", false)
+
+ // Cross-reference to issue #4 by admin
+ content = fmt.Sprintf("content5, mentions org3/repo3#%d", itarget.Index)
+ i = testCreateIssue(t, 2, 1, "title5", content, false)
+ ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
+ assert.Equal(t, issues_model.CommentTypeIssueRef, ref.Type)
+ assert.Equal(t, i.RepoID, ref.RefRepoID)
+ assert.False(t, ref.RefIsPull)
+ assert.Equal(t, references.XRefActionNone, ref.RefAction)
+
+ // Cross-reference to issue #4 with no permission
+ content = fmt.Sprintf("content6, mentions org3/repo3#%d", itarget.Index)
+ i = testCreateIssue(t, 4, 5, "title6", content, false)
+ unittest.AssertNotExistsBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
+}
+
+func TestXRef_NeuterCrossReferences(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Issue #1 to test against
+ itarget := testCreateIssue(t, 1, 2, "title1", "content1", false)
+
+ // Issue mentioning issue #1
+ title := fmt.Sprintf("title2, mentions #%d", itarget.Index)
+ i := testCreateIssue(t, 1, 2, title, "content2", false)
+ ref := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
+ assert.Equal(t, issues_model.CommentTypeIssueRef, ref.Type)
+ assert.Equal(t, references.XRefActionNone, ref.RefAction)
+
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ i.Title = "title2, no mentions"
+ require.NoError(t, issues_model.ChangeIssueTitle(db.DefaultContext, i, d, title))
+
+ ref = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: itarget.ID, RefIssueID: i.ID, RefCommentID: 0})
+ assert.Equal(t, issues_model.CommentTypeIssueRef, ref.Type)
+ assert.Equal(t, references.XRefActionNeutered, ref.RefAction)
+}
+
+func TestXRef_ResolveCrossReferences(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ i1 := testCreateIssue(t, 1, 2, "title1", "content1", false)
+ i2 := testCreateIssue(t, 1, 2, "title2", "content2", false)
+ i3 := testCreateIssue(t, 1, 2, "title3", "content3", false)
+ _, err := issues_model.ChangeIssueStatus(db.DefaultContext, i3, d, true)
+ require.NoError(t, err)
+
+ pr := testCreatePR(t, 1, 2, "titlepr", fmt.Sprintf("closes #%d", i1.Index))
+ rp := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i1.ID, RefIssueID: pr.Issue.ID, RefCommentID: 0})
+
+ c1 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i2.Index))
+ r1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c1.ID})
+
+ // Must be ignored
+ c2 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("mentions #%d", i2.Index))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i2.ID, RefIssueID: pr.Issue.ID, RefCommentID: c2.ID})
+
+ // Must be superseded by c4/r4
+ c3 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("reopens #%d", i3.Index))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c3.ID})
+
+ c4 := testCreateComment(t, 2, pr.Issue.ID, fmt.Sprintf("closes #%d", i3.Index))
+ r4 := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{IssueID: i3.ID, RefIssueID: pr.Issue.ID, RefCommentID: c4.ID})
+
+ refs, err := pr.ResolveCrossReferences(db.DefaultContext)
+ require.NoError(t, err)
+ assert.Len(t, refs, 3)
+ assert.Equal(t, rp.ID, refs[0].ID, "bad ref rp: %+v", refs[0])
+ assert.Equal(t, r1.ID, refs[1].ID, "bad ref r1: %+v", refs[1])
+ assert.Equal(t, r4.ID, refs[2].ID, "bad ref r4: %+v", refs[2])
+}
+
+func testCreateIssue(t *testing.T, repo, doer int64, title, content string, ispull bool) *issues_model.Issue {
+ r := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo})
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer})
+
+ ctx, committer, err := db.TxContext(db.DefaultContext)
+ require.NoError(t, err)
+ defer committer.Close()
+
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", r.ID)
+ require.NoError(t, err)
+ i := &issues_model.Issue{
+ RepoID: r.ID,
+ PosterID: d.ID,
+ Poster: d,
+ Title: title,
+ Content: content,
+ IsPull: ispull,
+ Index: idx,
+ }
+
+ err = issues_model.NewIssueWithIndex(ctx, d, issues_model.NewIssueOptions{
+ Repo: r,
+ Issue: i,
+ })
+ require.NoError(t, err)
+ i, err = issues_model.GetIssueByID(ctx, i.ID)
+ require.NoError(t, err)
+ require.NoError(t, i.AddCrossReferences(ctx, d, false))
+ require.NoError(t, committer.Commit())
+ return i
+}
+
+func testCreatePR(t *testing.T, repo, doer int64, title, content string) *issues_model.PullRequest {
+ r := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo})
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer})
+ i := &issues_model.Issue{RepoID: r.ID, PosterID: d.ID, Poster: d, Title: title, Content: content, IsPull: true}
+ pr := &issues_model.PullRequest{HeadRepoID: repo, BaseRepoID: repo, HeadBranch: "head", BaseBranch: "base", Status: issues_model.PullRequestStatusMergeable}
+ require.NoError(t, issues_model.NewPullRequest(db.DefaultContext, r, i, nil, nil, pr))
+ pr.Issue = i
+ return pr
+}
+
+func testCreateComment(t *testing.T, doer, issue int64, content string) *issues_model.Comment {
+ d := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doer})
+ i := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issue})
+ c := &issues_model.Comment{Type: issues_model.CommentTypeComment, PosterID: doer, Poster: d, IssueID: issue, Issue: i, Content: content}
+
+ ctx, committer, err := db.TxContext(db.DefaultContext)
+ require.NoError(t, err)
+ defer committer.Close()
+ err = db.Insert(ctx, c)
+ require.NoError(t, err)
+ require.NoError(t, c.AddCrossReferences(ctx, d, false))
+ require.NoError(t, committer.Commit())
+ return c
+}
diff --git a/models/issues/label.go b/models/issues/label.go
new file mode 100644
index 0000000..61478e1
--- /dev/null
+++ b/models/issues/label.go
@@ -0,0 +1,509 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "slices"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrRepoLabelNotExist represents a "RepoLabelNotExist" kind of error.
+type ErrRepoLabelNotExist struct {
+ LabelID int64
+ RepoID int64
+}
+
+// IsErrRepoLabelNotExist checks if an error is an ErrRepoLabelNotExist.
+func IsErrRepoLabelNotExist(err error) bool {
+ _, ok := err.(ErrRepoLabelNotExist)
+ return ok
+}
+
+func (err ErrRepoLabelNotExist) Error() string {
+ return fmt.Sprintf("label does not exist [label_id: %d, repo_id: %d]", err.LabelID, err.RepoID)
+}
+
+func (err ErrRepoLabelNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrOrgLabelNotExist represents a "OrgLabelNotExist" kind of error.
+type ErrOrgLabelNotExist struct {
+ LabelID int64
+ OrgID int64
+}
+
+// IsErrOrgLabelNotExist checks if an error is an ErrOrgLabelNotExist.
+func IsErrOrgLabelNotExist(err error) bool {
+ _, ok := err.(ErrOrgLabelNotExist)
+ return ok
+}
+
+func (err ErrOrgLabelNotExist) Error() string {
+ return fmt.Sprintf("label does not exist [label_id: %d, org_id: %d]", err.LabelID, err.OrgID)
+}
+
+func (err ErrOrgLabelNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrLabelNotExist represents a "LabelNotExist" kind of error.
+type ErrLabelNotExist struct {
+ LabelID int64
+}
+
+// IsErrLabelNotExist checks if an error is an ErrLabelNotExist.
+func IsErrLabelNotExist(err error) bool {
+ _, ok := err.(ErrLabelNotExist)
+ return ok
+}
+
+func (err ErrLabelNotExist) Error() string {
+ return fmt.Sprintf("label does not exist [label_id: %d]", err.LabelID)
+}
+
+func (err ErrLabelNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// Label represents a label of repository for issues.
+type Label struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ OrgID int64 `xorm:"INDEX"`
+ Name string
+ Exclusive bool
+ Description string
+ Color string `xorm:"VARCHAR(7)"`
+ NumIssues int
+ NumClosedIssues int
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+
+ NumOpenIssues int `xorm:"-"`
+ NumOpenRepoIssues int64 `xorm:"-"`
+ IsChecked bool `xorm:"-"`
+ QueryString string `xorm:"-"`
+ IsSelected bool `xorm:"-"`
+ IsExcluded bool `xorm:"-"`
+
+ ArchivedUnix timeutil.TimeStamp `xorm:"DEFAULT NULL"`
+}
+
+func init() {
+ db.RegisterModel(new(Label))
+ db.RegisterModel(new(IssueLabel))
+}
+
+// CalOpenIssues sets the number of open issues of a label based on the already stored number of closed issues.
+func (l *Label) CalOpenIssues() {
+ l.NumOpenIssues = l.NumIssues - l.NumClosedIssues
+}
+
+// SetArchived sets the label's archived state
+func (l *Label) SetArchived(isArchived bool) {
+ if !isArchived {
+ l.ArchivedUnix = timeutil.TimeStamp(0)
+ } else if isArchived && !l.IsArchived() {
+ // Only change the date when it is newly archived.
+ l.ArchivedUnix = timeutil.TimeStampNow()
+ }
+}
+
+// IsArchived returns true if the label is archived
+func (l *Label) IsArchived() bool {
+ return !l.ArchivedUnix.IsZero()
+}
+
+// CalOpenOrgIssues calculates the open issues of a label for a specific repo
+func (l *Label) CalOpenOrgIssues(ctx context.Context, repoID, labelID int64) {
+ counts, _ := CountIssuesByRepo(ctx, &IssuesOptions{
+ RepoIDs: []int64{repoID},
+ LabelIDs: []int64{labelID},
+ IsClosed: optional.Some(false),
+ })
+
+ for _, count := range counts {
+ l.NumOpenRepoIssues += count
+ }
+}
+
+// LoadSelectedLabelsAfterClick calculates the set of selected labels when a label is clicked
+func (l *Label) LoadSelectedLabelsAfterClick(currentSelectedLabels []int64, currentSelectedExclusiveScopes []string) {
+ labelQuerySlice := []int64{}
+ labelSelected := false
+ exclusiveScope := l.ExclusiveScope()
+ for i, curSel := range currentSelectedLabels {
+ if curSel == l.ID {
+ labelSelected = true
+ } else if -curSel == l.ID {
+ labelSelected = true
+ l.IsExcluded = true
+ } else if curSel != 0 {
+ // Exclude other labels in the same scope from selection
+ if curSel < 0 || exclusiveScope == "" || exclusiveScope != currentSelectedExclusiveScopes[i] {
+ labelQuerySlice = append(labelQuerySlice, curSel)
+ }
+ }
+ }
+
+ if !labelSelected {
+ labelQuerySlice = append(labelQuerySlice, l.ID)
+ }
+ l.IsSelected = labelSelected
+
+ // Sort and deduplicate the ids to avoid the crawlers asking for the
+ // same thing with simply a different order of parameters
+ slices.Sort(labelQuerySlice)
+ labelQuerySlice = slices.Compact(labelQuerySlice)
+ // Quick conversion (strings.Join() doesn't accept slices of Int64)
+ labelQuerySliceStrings := make([]string, len(labelQuerySlice))
+ for i, x := range labelQuerySlice {
+ labelQuerySliceStrings[i] = strconv.FormatInt(x, 10)
+ }
+ l.QueryString = strings.Join(labelQuerySliceStrings, ",")
+}
+
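+// Worked example (hypothetical label IDs and scopes): clicking label 8 with scope
+// "scope" while {1, -8} are selected leaves QueryString == "1" with the label
+// marked selected and excluded; clicking it while {1, 7} are selected drops the
+// same-scope label 7 and adds 8, giving QueryString == "1,8".
+//
+//	l.LoadSelectedLabelsAfterClick([]int64{1, -8}, []string{"", "scope"})
+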
+// BelongsToOrg returns true if label is an organization label
+func (l *Label) BelongsToOrg() bool {
+ return l.OrgID > 0
+}
+
+// BelongsToRepo returns true if label is a repository label
+func (l *Label) BelongsToRepo() bool {
+ return l.RepoID > 0
+}
+
+// ExclusiveScope returns the scope substring of the label name, or an empty string if there is none
+func (l *Label) ExclusiveScope() string {
+ if !l.Exclusive {
+ return ""
+ }
+ lastIndex := strings.LastIndex(l.Name, "/")
+ if lastIndex == -1 || lastIndex == 0 || lastIndex == len(l.Name)-1 {
+ return ""
+ }
+ return l.Name[:lastIndex]
+}
+
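+// Examples (illustrative, assuming the label is marked Exclusive):
+//
+//	"scope/bug"           -> "scope"
+//	"scope/subscope/bug"  -> "scope/subscope"
+//	"bug", "/bug", "bug/" -> "" (no scope)
+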
+// NewLabel creates a new label
+func NewLabel(ctx context.Context, l *Label) error {
+ color, err := label.NormalizeColor(l.Color)
+ if err != nil {
+ return err
+ }
+ l.Color = color
+
+ return db.Insert(ctx, l)
+}
+
+// NewLabels creates new labels
+func NewLabels(ctx context.Context, labels ...*Label) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ for _, l := range labels {
+ color, err := label.NormalizeColor(l.Color)
+ if err != nil {
+ return err
+ }
+ l.Color = color
+
+ if err := db.Insert(ctx, l); err != nil {
+ return err
+ }
+ }
+ return committer.Commit()
+}
+
+// UpdateLabel updates label information.
+func UpdateLabel(ctx context.Context, l *Label) error {
+ color, err := label.NormalizeColor(l.Color)
+ if err != nil {
+ return err
+ }
+ l.Color = color
+
+ return updateLabelCols(ctx, l, "name", "description", "color", "exclusive", "archived_unix")
+}
+
+// DeleteLabel deletes a label
+func DeleteLabel(ctx context.Context, id, labelID int64) error {
+ l, err := GetLabelByID(ctx, labelID)
+ if err != nil {
+ if IsErrLabelNotExist(err) {
+ return nil
+ }
+ return err
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ sess := db.GetEngine(ctx)
+
+ if l.BelongsToOrg() && l.OrgID != id {
+ return nil
+ }
+ if l.BelongsToRepo() && l.RepoID != id {
+ return nil
+ }
+
+ if _, err = db.DeleteByID[Label](ctx, labelID); err != nil {
+ return err
+ } else if _, err = sess.
+ Where("label_id = ?", labelID).
+ Delete(new(IssueLabel)); err != nil {
+ return err
+ }
+
+	// Delete comments that reference the now-deleted label
+ if _, err = sess.Where("label_id = ?", labelID).Cols("label_id").Delete(&Comment{}); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// GetLabelByID returns a label by given ID.
+func GetLabelByID(ctx context.Context, labelID int64) (*Label, error) {
+ if labelID <= 0 {
+ return nil, ErrLabelNotExist{labelID}
+ }
+
+ l := &Label{}
+ has, err := db.GetEngine(ctx).ID(labelID).Get(l)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrLabelNotExist{l.ID}
+ }
+ return l, nil
+}
+
+// GetLabelsByIDs returns a list of labels by IDs
+func GetLabelsByIDs(ctx context.Context, labelIDs []int64, cols ...string) ([]*Label, error) {
+ labels := make([]*Label, 0, len(labelIDs))
+ return labels, db.GetEngine(ctx).Table("label").
+ In("id", labelIDs).
+ Asc("name").
+ Cols(cols...).
+ Find(&labels)
+}
+
+// GetLabelInRepoByName returns a label by name in given repository.
+func GetLabelInRepoByName(ctx context.Context, repoID int64, labelName string) (*Label, error) {
+ if len(labelName) == 0 || repoID <= 0 {
+ return nil, ErrRepoLabelNotExist{0, repoID}
+ }
+
+ l, exist, err := db.Get[Label](ctx, builder.Eq{"name": labelName, "repo_id": repoID})
+ if err != nil {
+ return nil, err
+ } else if !exist {
+ return nil, ErrRepoLabelNotExist{0, repoID}
+ }
+ return l, nil
+}
+
+// GetLabelInRepoByID returns a label by ID in given repository.
+func GetLabelInRepoByID(ctx context.Context, repoID, labelID int64) (*Label, error) {
+ if labelID <= 0 || repoID <= 0 {
+ return nil, ErrRepoLabelNotExist{labelID, repoID}
+ }
+
+ l, exist, err := db.Get[Label](ctx, builder.Eq{"id": labelID, "repo_id": repoID})
+ if err != nil {
+ return nil, err
+ } else if !exist {
+ return nil, ErrRepoLabelNotExist{labelID, repoID}
+ }
+ return l, nil
+}
+
+// GetLabelIDsInRepoByNames returns a list of labelIDs by names in a given
+// repository.
+// It silently ignores label names that do not belong to the repository.
+func GetLabelIDsInRepoByNames(ctx context.Context, repoID int64, labelNames []string) ([]int64, error) {
+ labelIDs := make([]int64, 0, len(labelNames))
+ return labelIDs, db.GetEngine(ctx).Table("label").
+ Where("repo_id = ?", repoID).
+ In("name", labelNames).
+ Asc("name").
+ Cols("id").
+ Find(&labelIDs)
+}
+
+// BuildLabelNamesIssueIDsCondition returns a sub-query builder that selects the IDs of issues carrying any of the given label names
+func BuildLabelNamesIssueIDsCondition(labelNames []string) *builder.Builder {
+ return builder.Select("issue_label.issue_id").
+ From("issue_label").
+ InnerJoin("label", "label.id = issue_label.label_id").
+ Where(
+ builder.In("label.name", labelNames),
+ ).
+ GroupBy("issue_label.issue_id")
+}
+
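+// Shape of the generated sub-query (sketch):
+//
+//	SELECT issue_label.issue_id
+//	FROM issue_label
+//	INNER JOIN label ON label.id = issue_label.label_id
+//	WHERE label.name IN (...)
+//	GROUP BY issue_label.issue_id
+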
+// GetLabelsInRepoByIDs returns a list of labels by IDs in given repository,
+// it silently ignores label IDs that do not belong to the repository.
+func GetLabelsInRepoByIDs(ctx context.Context, repoID int64, labelIDs []int64) ([]*Label, error) {
+ labels := make([]*Label, 0, len(labelIDs))
+ return labels, db.GetEngine(ctx).
+ Where("repo_id = ?", repoID).
+ In("id", labelIDs).
+ Asc("name").
+ Find(&labels)
+}
+
+// GetLabelsByRepoID returns all labels that belong to given repository by ID.
+func GetLabelsByRepoID(ctx context.Context, repoID int64, sortType string, listOptions db.ListOptions) ([]*Label, error) {
+ if repoID <= 0 {
+ return nil, ErrRepoLabelNotExist{0, repoID}
+ }
+ labels := make([]*Label, 0, 10)
+ sess := db.GetEngine(ctx).Where("repo_id = ?", repoID)
+
+ switch sortType {
+ case "reversealphabetically":
+ sess.Desc("name")
+ case "leastissues":
+ sess.Asc("num_issues")
+ case "mostissues":
+ sess.Desc("num_issues")
+ default:
+ sess.Asc("name")
+ }
+
+ if listOptions.Page != 0 {
+ sess = db.SetSessionPagination(sess, &listOptions)
+ }
+
+ return labels, sess.Find(&labels)
+}
+
+// CountLabelsByRepoID counts all labels that belong to the given repository by ID.
+func CountLabelsByRepoID(ctx context.Context, repoID int64) (int64, error) {
+ return db.GetEngine(ctx).Where("repo_id = ?", repoID).Count(&Label{})
+}
+
+// GetLabelInOrgByName returns a label by name in given organization.
+func GetLabelInOrgByName(ctx context.Context, orgID int64, labelName string) (*Label, error) {
+ if len(labelName) == 0 || orgID <= 0 {
+ return nil, ErrOrgLabelNotExist{0, orgID}
+ }
+
+ l, exist, err := db.Get[Label](ctx, builder.Eq{"name": labelName, "org_id": orgID})
+ if err != nil {
+ return nil, err
+ } else if !exist {
+ return nil, ErrOrgLabelNotExist{0, orgID}
+ }
+ return l, nil
+}
+
+// GetLabelInOrgByID returns a label by ID in given organization.
+func GetLabelInOrgByID(ctx context.Context, orgID, labelID int64) (*Label, error) {
+ if labelID <= 0 || orgID <= 0 {
+ return nil, ErrOrgLabelNotExist{labelID, orgID}
+ }
+
+ l, exist, err := db.Get[Label](ctx, builder.Eq{"id": labelID, "org_id": orgID})
+ if err != nil {
+ return nil, err
+ } else if !exist {
+ return nil, ErrOrgLabelNotExist{labelID, orgID}
+ }
+ return l, nil
+}
+
+// GetLabelsInOrgByIDs returns a list of labels by IDs in given organization,
+// it silently ignores label IDs that do not belong to the organization.
+func GetLabelsInOrgByIDs(ctx context.Context, orgID int64, labelIDs []int64) ([]*Label, error) {
+ labels := make([]*Label, 0, len(labelIDs))
+ return labels, db.GetEngine(ctx).
+ Where("org_id = ?", orgID).
+ In("id", labelIDs).
+ Asc("name").
+ Find(&labels)
+}
+
+// GetLabelsByOrgID returns all labels that belong to given organization by ID.
+func GetLabelsByOrgID(ctx context.Context, orgID int64, sortType string, listOptions db.ListOptions) ([]*Label, error) {
+ if orgID <= 0 {
+ return nil, ErrOrgLabelNotExist{0, orgID}
+ }
+ labels := make([]*Label, 0, 10)
+ sess := db.GetEngine(ctx).Where("org_id = ?", orgID)
+
+ switch sortType {
+ case "reversealphabetically":
+ sess.Desc("name")
+ case "leastissues":
+ sess.Asc("num_issues")
+ case "mostissues":
+ sess.Desc("num_issues")
+ default:
+ sess.Asc("name")
+ }
+
+ if listOptions.Page != 0 {
+ sess = db.SetSessionPagination(sess, &listOptions)
+ }
+
+ return labels, sess.Find(&labels)
+}
+
+// GetLabelIDsByNames returns a list of labelIDs by names.
+// It doesn't filter them by repo or org, so it could return labels belonging to different repos/orgs.
+// It is used for filtering issues via the indexer and is of little use elsewhere.
+// Since labels in different repos/orgs can share a name, the returned IDs can outnumber the given names.
+func GetLabelIDsByNames(ctx context.Context, labelNames []string) ([]int64, error) {
+ labelIDs := make([]int64, 0, len(labelNames))
+ return labelIDs, db.GetEngine(ctx).Table("label").
+ In("name", labelNames).
+ Cols("id").
+ Find(&labelIDs)
+}
+
+// CountLabelsByOrgID counts all labels that belong to the given organization by ID.
+func CountLabelsByOrgID(ctx context.Context, orgID int64) (int64, error) {
+ return db.GetEngine(ctx).Where("org_id = ?", orgID).Count(&Label{})
+}
+
+func updateLabelCols(ctx context.Context, l *Label, cols ...string) error {
+ _, err := db.GetEngine(ctx).ID(l.ID).
+ SetExpr("num_issues",
+ builder.Select("count(*)").From("issue_label").
+ Where(builder.Eq{"label_id": l.ID}),
+ ).
+ SetExpr("num_closed_issues",
+ builder.Select("count(*)").From("issue_label").
+ InnerJoin("issue", "issue_label.issue_id = issue.id").
+ Where(builder.Eq{
+ "issue_label.label_id": l.ID,
+ "issue.is_closed": true,
+ }),
+ ).
+ Cols(cols...).Update(l)
+ return err
+}
diff --git a/models/issues/label_test.go b/models/issues/label_test.go
new file mode 100644
index 0000000..b03fc1c
--- /dev/null
+++ b/models/issues/label_test.go
@@ -0,0 +1,422 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLabel_CalOpenIssues(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ label.CalOpenIssues()
+ assert.EqualValues(t, 2, label.NumOpenIssues)
+}
+
+func TestLabel_LoadSelectedLabelsAfterClick(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+	// Load label id:8 (scope/label2), which has a scope and is mutually
+	// exclusive with id:7 (scope/label1)
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 8})
+
+	// First test: with a negated id and a scope
+ label.LoadSelectedLabelsAfterClick([]int64{1, -8}, []string{"", "scope"})
+ assert.Equal(t, "1", label.QueryString)
+ assert.True(t, label.IsSelected)
+
+	// Second test: with duplicates
+ label.LoadSelectedLabelsAfterClick([]int64{1, 7, 1, 7, 7}, []string{"", "scope", "", "scope", "scope"})
+ assert.Equal(t, "1,8", label.QueryString)
+ assert.False(t, label.IsSelected)
+
+	// Third test: empty set
+ label.LoadSelectedLabelsAfterClick([]int64{}, []string{})
+ assert.False(t, label.IsSelected)
+ assert.Equal(t, "8", label.QueryString)
+}
+
+func TestLabel_ExclusiveScope(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 7})
+ assert.Equal(t, "scope", label.ExclusiveScope())
+
+ label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 9})
+ assert.Equal(t, "scope/subscope", label.ExclusiveScope())
+}
+
+func TestNewLabels(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ labels := []*issues_model.Label{
+ {RepoID: 2, Name: "labelName2", Color: "#123456"},
+ {RepoID: 3, Name: "labelName3", Color: "#123"},
+ {RepoID: 4, Name: "labelName4", Color: "ABCDEF"},
+ {RepoID: 5, Name: "labelName5", Color: "DEF"},
+ }
+ require.Error(t, issues_model.NewLabel(db.DefaultContext, &issues_model.Label{RepoID: 3, Name: "invalid Color", Color: ""}))
+ require.Error(t, issues_model.NewLabel(db.DefaultContext, &issues_model.Label{RepoID: 3, Name: "invalid Color", Color: "#45G"}))
+ require.Error(t, issues_model.NewLabel(db.DefaultContext, &issues_model.Label{RepoID: 3, Name: "invalid Color", Color: "#12345G"}))
+ require.Error(t, issues_model.NewLabel(db.DefaultContext, &issues_model.Label{RepoID: 3, Name: "invalid Color", Color: "45G"}))
+ require.Error(t, issues_model.NewLabel(db.DefaultContext, &issues_model.Label{RepoID: 3, Name: "invalid Color", Color: "12345G"}))
+ for _, label := range labels {
+ unittest.AssertNotExistsBean(t, label)
+ }
+ require.NoError(t, issues_model.NewLabels(db.DefaultContext, labels...))
+ for _, label := range labels {
+ unittest.AssertExistsAndLoadBean(t, label, unittest.Cond("id = ?", label.ID))
+ }
+ unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
+}
+
+func TestGetLabelByID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label, err := issues_model.GetLabelByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.EqualValues(t, 1, label.ID)
+
+ _, err = issues_model.GetLabelByID(db.DefaultContext, unittest.NonexistentID)
+ assert.True(t, issues_model.IsErrLabelNotExist(err))
+}
+
+func TestGetLabelInRepoByName(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label, err := issues_model.GetLabelInRepoByName(db.DefaultContext, 1, "label1")
+ require.NoError(t, err)
+ assert.EqualValues(t, 1, label.ID)
+ assert.Equal(t, "label1", label.Name)
+
+ _, err = issues_model.GetLabelInRepoByName(db.DefaultContext, 1, "")
+ assert.True(t, issues_model.IsErrRepoLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInRepoByName(db.DefaultContext, unittest.NonexistentID, "nonexistent")
+ assert.True(t, issues_model.IsErrRepoLabelNotExist(err))
+}
+
+func TestGetLabelInRepoByNames(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ labelIDs, err := issues_model.GetLabelIDsInRepoByNames(db.DefaultContext, 1, []string{"label1", "label2"})
+ require.NoError(t, err)
+
+ assert.Len(t, labelIDs, 2)
+
+ assert.Equal(t, int64(1), labelIDs[0])
+ assert.Equal(t, int64(2), labelIDs[1])
+}
+
+func TestGetLabelInRepoByNamesDiscardsNonExistentLabels(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+	// label3 doesn't exist; see labels.yml
+ labelIDs, err := issues_model.GetLabelIDsInRepoByNames(db.DefaultContext, 1, []string{"label1", "label2", "label3"})
+ require.NoError(t, err)
+
+ assert.Len(t, labelIDs, 2)
+
+ assert.Equal(t, int64(1), labelIDs[0])
+ assert.Equal(t, int64(2), labelIDs[1])
+ require.NoError(t, err)
+}
+
+func TestGetLabelInRepoByID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label, err := issues_model.GetLabelInRepoByID(db.DefaultContext, 1, 1)
+ require.NoError(t, err)
+ assert.EqualValues(t, 1, label.ID)
+
+ _, err = issues_model.GetLabelInRepoByID(db.DefaultContext, 1, -1)
+ assert.True(t, issues_model.IsErrRepoLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInRepoByID(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID)
+ assert.True(t, issues_model.IsErrRepoLabelNotExist(err))
+}
+
+func TestGetLabelsInRepoByIDs(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ labels, err := issues_model.GetLabelsInRepoByIDs(db.DefaultContext, 1, []int64{1, 2, unittest.NonexistentID})
+ require.NoError(t, err)
+ if assert.Len(t, labels, 2) {
+ assert.EqualValues(t, 1, labels[0].ID)
+ assert.EqualValues(t, 2, labels[1].ID)
+ }
+}
+
+func TestGetLabelsByRepoID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ testSuccess := func(repoID int64, sortType string, expectedIssueIDs []int64) {
+ labels, err := issues_model.GetLabelsByRepoID(db.DefaultContext, repoID, sortType, db.ListOptions{})
+ require.NoError(t, err)
+ assert.Len(t, labels, len(expectedIssueIDs))
+ for i, label := range labels {
+ assert.EqualValues(t, expectedIssueIDs[i], label.ID)
+ }
+ }
+ testSuccess(1, "leastissues", []int64{2, 1})
+ testSuccess(1, "mostissues", []int64{1, 2})
+ testSuccess(1, "reversealphabetically", []int64{2, 1})
+ testSuccess(1, "default", []int64{1, 2})
+}
+
+// Org versions
+
+func TestGetLabelInOrgByName(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label, err := issues_model.GetLabelInOrgByName(db.DefaultContext, 3, "orglabel3")
+ require.NoError(t, err)
+ assert.EqualValues(t, 3, label.ID)
+ assert.Equal(t, "orglabel3", label.Name)
+
+ _, err = issues_model.GetLabelInOrgByName(db.DefaultContext, 3, "")
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByName(db.DefaultContext, 0, "orglabel3")
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByName(db.DefaultContext, -1, "orglabel3")
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByName(db.DefaultContext, unittest.NonexistentID, "nonexistent")
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+}
+
+func TestGetLabelInOrgByID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label, err := issues_model.GetLabelInOrgByID(db.DefaultContext, 3, 3)
+ require.NoError(t, err)
+ assert.EqualValues(t, 3, label.ID)
+
+ _, err = issues_model.GetLabelInOrgByID(db.DefaultContext, 3, -1)
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByID(db.DefaultContext, 0, 3)
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByID(db.DefaultContext, -1, 3)
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelInOrgByID(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID)
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+}
+
+func TestGetLabelsInOrgByIDs(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ labels, err := issues_model.GetLabelsInOrgByIDs(db.DefaultContext, 3, []int64{3, 4, unittest.NonexistentID})
+ require.NoError(t, err)
+ if assert.Len(t, labels, 2) {
+ assert.EqualValues(t, 3, labels[0].ID)
+ assert.EqualValues(t, 4, labels[1].ID)
+ }
+}
+
+func TestGetLabelsByOrgID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ testSuccess := func(orgID int64, sortType string, expectedIssueIDs []int64) {
+ labels, err := issues_model.GetLabelsByOrgID(db.DefaultContext, orgID, sortType, db.ListOptions{})
+ require.NoError(t, err)
+ assert.Len(t, labels, len(expectedIssueIDs))
+ for i, label := range labels {
+ assert.EqualValues(t, expectedIssueIDs[i], label.ID)
+ }
+ }
+ testSuccess(3, "leastissues", []int64{3, 4})
+ testSuccess(3, "mostissues", []int64{4, 3})
+ testSuccess(3, "reversealphabetically", []int64{4, 3})
+ testSuccess(3, "default", []int64{3, 4})
+
+ var err error
+ _, err = issues_model.GetLabelsByOrgID(db.DefaultContext, 0, "leastissues", db.ListOptions{})
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+
+ _, err = issues_model.GetLabelsByOrgID(db.DefaultContext, -1, "leastissues", db.ListOptions{})
+ assert.True(t, issues_model.IsErrOrgLabelNotExist(err))
+}
+
+//
+
+func TestGetLabelsByIssueID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ labels, err := issues_model.GetLabelsByIssueID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ if assert.Len(t, labels, 1) {
+ assert.EqualValues(t, 1, labels[0].ID)
+ }
+
+ labels, err = issues_model.GetLabelsByIssueID(db.DefaultContext, unittest.NonexistentID)
+ require.NoError(t, err)
+ assert.Empty(t, labels)
+}
+
+func TestUpdateLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ // make sure the update doesn't overwrite fields that are not being changed, such as the description
+ update := &issues_model.Label{
+ ID: label.ID,
+ Color: "#ffff00",
+ Name: "newLabelName",
+ Description: label.Description,
+ Exclusive: false,
+ ArchivedUnix: timeutil.TimeStamp(0),
+ }
+ label.Color = update.Color
+ label.Name = update.Name
+ require.NoError(t, issues_model.UpdateLabel(db.DefaultContext, update))
+ newLabel := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ assert.EqualValues(t, label.ID, newLabel.ID)
+ assert.EqualValues(t, label.Color, newLabel.Color)
+ assert.EqualValues(t, label.Name, newLabel.Name)
+ assert.EqualValues(t, label.Description, newLabel.Description)
+ assert.EqualValues(t, 0, newLabel.ArchivedUnix)
+ unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
+}
+
+func TestDeleteLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ require.NoError(t, issues_model.DeleteLabel(db.DefaultContext, label.RepoID, label.ID))
+ unittest.AssertNotExistsBean(t, &issues_model.Label{ID: label.ID, RepoID: label.RepoID})
+
+ require.NoError(t, issues_model.DeleteLabel(db.DefaultContext, label.RepoID, label.ID))
+ unittest.AssertNotExistsBean(t, &issues_model.Label{ID: label.ID})
+
+ require.NoError(t, issues_model.DeleteLabel(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID))
+ unittest.CheckConsistencyFor(t, &issues_model.Label{}, &repo_model.Repository{})
+}
+
+func TestHasIssueLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ assert.True(t, issues_model.HasIssueLabel(db.DefaultContext, 1, 1))
+ assert.False(t, issues_model.HasIssueLabel(db.DefaultContext, 1, 2))
+ assert.False(t, issues_model.HasIssueLabel(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID))
+}
+
+func TestNewIssueLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 1})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ // add new IssueLabel
+ prevNumIssues := label.NumIssues
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, label, doer))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{
+ Type: issues_model.CommentTypeLabel,
+ PosterID: doer.ID,
+ IssueID: issue.ID,
+ LabelID: label.ID,
+ Content: "1",
+ })
+ label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
+ assert.EqualValues(t, prevNumIssues+1, label.NumIssues)
+
+ // re-add existing IssueLabel
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, label, doer))
+ unittest.CheckConsistencyFor(t, &issues_model.Issue{}, &issues_model.Label{})
+}
+
+func TestNewIssueExclusiveLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 18})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ otherLabel := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 6})
+ exclusiveLabelA := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 7})
+ exclusiveLabelB := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 8})
+
+ // coexisting regular and exclusive label
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, otherLabel, doer))
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, exclusiveLabelA, doer))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: otherLabel.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: exclusiveLabelA.ID})
+
+ // exclusive label replaces existing one
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, exclusiveLabelB, doer))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: otherLabel.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: exclusiveLabelB.ID})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: exclusiveLabelA.ID})
+
+ // exclusive label replaces existing one again
+ require.NoError(t, issues_model.NewIssueLabel(db.DefaultContext, issue, exclusiveLabelA, doer))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: otherLabel.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: exclusiveLabelA.ID})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: exclusiveLabelB.ID})
+}
+
+func TestNewIssueLabels(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ label1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ label2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 5})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+
+ require.NoError(t, issues_model.NewIssueLabels(db.DefaultContext, issue, []*issues_model.Label{label1, label2}, doer))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label1.ID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{
+ Type: issues_model.CommentTypeLabel,
+ PosterID: doer.ID,
+ IssueID: issue.ID,
+ LabelID: label1.ID,
+ Content: "1",
+ })
+ unittest.AssertExistsAndLoadBean(t, &issues_model.IssueLabel{IssueID: issue.ID, LabelID: label1.ID})
+ label1 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 1})
+ assert.EqualValues(t, 3, label1.NumIssues)
+ assert.EqualValues(t, 1, label1.NumClosedIssues)
+ label2 = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: 2})
+ assert.EqualValues(t, 1, label2.NumIssues)
+ assert.EqualValues(t, 1, label2.NumClosedIssues)
+
+ // corner case: test empty slice
+ require.NoError(t, issues_model.NewIssueLabels(db.DefaultContext, issue, []*issues_model.Label{}, doer))
+
+ unittest.CheckConsistencyFor(t, &issues_model.Issue{}, &issues_model.Label{})
+}
+
+func TestDeleteIssueLabel(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ testSuccess := func(labelID, issueID, doerID int64) {
+ label := unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID})
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: issueID})
+ doer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: doerID})
+
+ expectedNumIssues := label.NumIssues
+ expectedNumClosedIssues := label.NumClosedIssues
+ if unittest.BeanExists(t, &issues_model.IssueLabel{IssueID: issueID, LabelID: labelID}) {
+ expectedNumIssues--
+ if issue.IsClosed {
+ expectedNumClosedIssues--
+ }
+ }
+
+ ctx, committer, err := db.TxContext(db.DefaultContext)
+ require.NoError(t, err)
+ defer committer.Close()
+ require.NoError(t, issues_model.DeleteIssueLabel(ctx, issue, label, doer))
+ require.NoError(t, committer.Commit())
+
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: issueID, LabelID: labelID})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{
+ Type: issues_model.CommentTypeLabel,
+ PosterID: doerID,
+ IssueID: issueID,
+ LabelID: labelID,
+ }, `content=""`)
+ label = unittest.AssertExistsAndLoadBean(t, &issues_model.Label{ID: labelID})
+ assert.EqualValues(t, expectedNumIssues, label.NumIssues)
+ assert.EqualValues(t, expectedNumClosedIssues, label.NumClosedIssues)
+ }
+ testSuccess(1, 1, 2)
+ testSuccess(2, 5, 2)
+ testSuccess(1, 1, 2) // delete non-existent IssueLabel
+
+ unittest.CheckConsistencyFor(t, &issues_model.Issue{}, &issues_model.Label{})
+}
diff --git a/models/issues/main_test.go b/models/issues/main_test.go
new file mode 100644
index 0000000..baabd66
--- /dev/null
+++ b/models/issues/main_test.go
@@ -0,0 +1,33 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models"
+ _ "code.gitea.io/gitea/models/actions"
+ _ "code.gitea.io/gitea/models/activities"
+ _ "code.gitea.io/gitea/models/repo"
+ _ "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/require"
+)
+
+func TestFixturesAreConsistent(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ unittest.CheckConsistencyFor(t,
+ &issues_model.Issue{},
+ &issues_model.PullRequest{},
+ &issues_model.Milestone{},
+ &issues_model.Label{},
+ )
+}
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/models/issues/milestone.go b/models/issues/milestone.go
new file mode 100644
index 0000000..4b3cb0e
--- /dev/null
+++ b/models/issues/milestone.go
@@ -0,0 +1,394 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "html/template"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/optional"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrMilestoneNotExist represents a "MilestoneNotExist" kind of error.
+type ErrMilestoneNotExist struct {
+ ID int64
+ RepoID int64
+ Name string
+}
+
+// IsErrMilestoneNotExist checks if an error is an ErrMilestoneNotExist.
+func IsErrMilestoneNotExist(err error) bool {
+ _, ok := err.(ErrMilestoneNotExist)
+ return ok
+}
+
+func (err ErrMilestoneNotExist) Error() string {
+ if len(err.Name) > 0 {
+ return fmt.Sprintf("milestone does not exist [name: %s, repo_id: %d]", err.Name, err.RepoID)
+ }
+ return fmt.Sprintf("milestone does not exist [id: %d, repo_id: %d]", err.ID, err.RepoID)
+}
+
+func (err ErrMilestoneNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// Milestone represents a milestone of repository.
+type Milestone struct {
+ ID int64 `xorm:"pk autoincr"`
+ RepoID int64 `xorm:"INDEX"`
+ Repo *repo_model.Repository `xorm:"-"`
+ Name string
+ Content string `xorm:"TEXT"`
+ RenderedContent template.HTML `xorm:"-"`
+ IsClosed bool
+ NumIssues int
+ NumClosedIssues int
+ NumOpenIssues int `xorm:"-"`
+ Completeness int // Percentage (0-100).
+ IsOverdue bool `xorm:"-"`
+
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+ DeadlineUnix timeutil.TimeStamp
+ ClosedDateUnix timeutil.TimeStamp
+ DeadlineString string `xorm:"-"`
+
+ TotalTrackedTime int64 `xorm:"-"`
+}
+
+func init() {
+ db.RegisterModel(new(Milestone))
+}
+
+// BeforeUpdate is invoked from XORM before updating this object.
+func (m *Milestone) BeforeUpdate() {
+ if m.NumIssues > 0 {
+ m.Completeness = m.NumClosedIssues * 100 / m.NumIssues
+ } else {
+ m.Completeness = 0
+ }
+}
+
+// AfterLoad is invoked from XORM after setting the value of a field of
+// this object.
+func (m *Milestone) AfterLoad() {
+ m.NumOpenIssues = m.NumIssues - m.NumClosedIssues
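+ // A far-future deadline (year 9999) appears to act as the "no deadline" sentinel;
+ // in that case the deadline string and overdue flag are not computed.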
+ if m.DeadlineUnix.Year() == 9999 {
+ return
+ }
+
+ m.DeadlineString = m.DeadlineUnix.FormatDate()
+ if m.IsClosed {
+ m.IsOverdue = m.ClosedDateUnix >= m.DeadlineUnix
+ } else {
+ m.IsOverdue = timeutil.TimeStampNow() >= m.DeadlineUnix
+ }
+}
+
+// State returns string representation of milestone status.
+func (m *Milestone) State() api.StateType {
+ if m.IsClosed {
+ return api.StateClosed
+ }
+ return api.StateOpen
+}
+
+// NewMilestone creates a new milestone for the repository.
+func NewMilestone(ctx context.Context, m *Milestone) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ m.Name = strings.TrimSpace(m.Name)
+
+ if err = db.Insert(ctx, m); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + 1 WHERE id = ?", m.RepoID); err != nil {
+ return err
+ }
+ return committer.Commit()
+}
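+
+// Example (illustrative only, not part of the original file): creating a
+// milestone for an existing repository; ctx and repo are assumed to be in scope.
+//
+//	m := &Milestone{RepoID: repo.ID, Name: "v1.0", Content: "first stable release"}
+//	if err := NewMilestone(ctx, m); err != nil {
+//		return err
+//	}
+//	// m.ID is populated by the insert, and the repository's milestone count is bumped.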
+
+// HasMilestoneByRepoID returns whether the milestone exists in the repository.
+func HasMilestoneByRepoID(ctx context.Context, repoID, id int64) (bool, error) {
+ return db.GetEngine(ctx).ID(id).Where("repo_id=?", repoID).Exist(new(Milestone))
+}
+
+// GetMilestoneByRepoID returns the milestone in a repository.
+func GetMilestoneByRepoID(ctx context.Context, repoID, id int64) (*Milestone, error) {
+ m := new(Milestone)
+ has, err := db.GetEngine(ctx).ID(id).Where("repo_id=?", repoID).Get(m)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrMilestoneNotExist{ID: id, RepoID: repoID}
+ }
+ return m, nil
+}
+
+// GetMilestoneByRepoIDANDName returns the milestone with the given name in the repository, if it exists
+func GetMilestoneByRepoIDANDName(ctx context.Context, repoID int64, name string) (*Milestone, error) {
+ var mile Milestone
+ has, err := db.GetEngine(ctx).Where("repo_id=? AND name=?", repoID, name).Get(&mile)
+ if err != nil {
+ return nil, err
+ }
+ if !has {
+ return nil, ErrMilestoneNotExist{Name: name, RepoID: repoID}
+ }
+ return &mile, nil
+}
+
+// UpdateMilestone updates information of given milestone.
+func UpdateMilestone(ctx context.Context, m *Milestone, oldIsClosed bool) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if m.IsClosed && !oldIsClosed {
+ m.ClosedDateUnix = timeutil.TimeStampNow()
+ }
+
+ if err := updateMilestone(ctx, m); err != nil {
+ return err
+ }
+
+ // if IsClosed changed, update milestone numbers of repository
+ if oldIsClosed != m.IsClosed {
+ if err := updateRepoMilestoneNum(ctx, m.RepoID); err != nil {
+ return err
+ }
+ }
+
+ return committer.Commit()
+}
+
+func updateMilestone(ctx context.Context, m *Milestone) error {
+ m.Name = strings.TrimSpace(m.Name)
+ _, err := db.GetEngine(ctx).ID(m.ID).AllCols().Update(m)
+ if err != nil {
+ return err
+ }
+ return UpdateMilestoneCounters(ctx, m.ID)
+}
+
+func updateMilestoneCounters(ctx context.Context, id int64, noAutoTime bool, updatedUnix timeutil.TimeStamp) error {
+ e := db.GetEngine(ctx)
+ sess := e.ID(id).
+ SetExpr("num_issues", builder.Select("count(*)").From("issue").Where(
+ builder.Eq{"milestone_id": id},
+ )).
+ SetExpr("num_closed_issues", builder.Select("count(*)").From("issue").Where(
+ builder.Eq{
+ "milestone_id": id,
+ "is_closed": true,
+ },
+ ))
+ if noAutoTime {
+ sess.SetExpr("updated_unix", updatedUnix).NoAutoTime()
+ }
+ _, err := sess.Update(&Milestone{})
+ if err != nil {
+ return err
+ }
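+ // Recompute completeness as an integer percentage; the CASE expression guards
+ // against division by zero for milestones without any issues.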
+ _, err = e.Exec("UPDATE `milestone` SET completeness=100*num_closed_issues/(CASE WHEN num_issues > 0 THEN num_issues ELSE 1 END) WHERE id=?",
+ id,
+ )
+ return err
+}
+
+// UpdateMilestoneCounters calculates NumIssues, NumClosedIssues and Completeness
+func UpdateMilestoneCounters(ctx context.Context, id int64) error {
+ return updateMilestoneCounters(ctx, id, false, 0)
+}
+
+// UpdateMilestoneCountersWithDate calculates NumIssues, NumClosedIssues and Completeness, and sets the UpdatedUnix date
+func UpdateMilestoneCountersWithDate(ctx context.Context, id int64, updatedUnix timeutil.TimeStamp) error {
+ return updateMilestoneCounters(ctx, id, true, updatedUnix)
+}
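+
+// Example (illustrative only, not part of the original file): refreshing the
+// counters after an issue's open/closed state has changed; ctx and issue are
+// assumed to be in scope.
+//
+//	if err := UpdateMilestoneCounters(ctx, issue.MilestoneID); err != nil {
+//		return err
+//	}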
+
+// ChangeMilestoneStatusByRepoIDAndID changes a milestone open/closed status if the milestone ID is in the repo.
+func ChangeMilestoneStatusByRepoIDAndID(ctx context.Context, repoID, milestoneID int64, isClosed bool) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ m := &Milestone{
+ ID: milestoneID,
+ RepoID: repoID,
+ }
+
+ has, err := db.GetEngine(ctx).ID(milestoneID).Where("repo_id = ?", repoID).Get(m)
+ if err != nil {
+ return err
+ } else if !has {
+ return ErrMilestoneNotExist{ID: milestoneID, RepoID: repoID}
+ }
+
+ if err := changeMilestoneStatus(ctx, m, isClosed); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// ChangeMilestoneStatus changes the milestone open/closed status.
+func ChangeMilestoneStatus(ctx context.Context, m *Milestone, isClosed bool) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := changeMilestoneStatus(ctx, m, isClosed); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
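+
+// Example (illustrative only, not part of the original file): closing a
+// previously loaded milestone; ctx and milestone are assumed to be in scope.
+//
+//	if err := ChangeMilestoneStatus(ctx, milestone, true); err != nil {
+//		return err
+//	}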
+
+func changeMilestoneStatus(ctx context.Context, m *Milestone, isClosed bool) error {
+ m.IsClosed = isClosed
+ if isClosed {
+ m.ClosedDateUnix = timeutil.TimeStampNow()
+ }
+
+ count, err := db.GetEngine(ctx).ID(m.ID).Where("repo_id = ? AND is_closed = ?", m.RepoID, !isClosed).Cols("is_closed", "closed_date_unix").Update(m)
+ if err != nil {
+ return err
+ }
+ if count < 1 {
+ return nil
+ }
+ return updateRepoMilestoneNum(ctx, m.RepoID)
+}
+
+// DeleteMilestoneByRepoID deletes a milestone from a repository.
+func DeleteMilestoneByRepoID(ctx context.Context, repoID, id int64) error {
+ m, err := GetMilestoneByRepoID(ctx, repoID, id)
+ if err != nil {
+ if IsErrMilestoneNotExist(err) {
+ return nil
+ }
+ return err
+ }
+
+ repo, err := repo_model.GetRepositoryByID(ctx, m.RepoID)
+ if err != nil {
+ return err
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if _, err = db.DeleteByID[Milestone](ctx, m.ID); err != nil {
+ return err
+ }
+
+ numMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
+ RepoID: repo.ID,
+ })
+ if err != nil {
+ return err
+ }
+ numClosedMilestones, err := db.Count[Milestone](ctx, FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ })
+ if err != nil {
+ return err
+ }
+ repo.NumMilestones = int(numMilestones)
+ repo.NumClosedMilestones = int(numClosedMilestones)
+
+ if _, err = db.GetEngine(ctx).ID(repo.ID).Cols("num_milestones, num_closed_milestones").Update(repo); err != nil {
+ return err
+ }
+
+ if _, err = db.Exec(ctx, "UPDATE `issue` SET milestone_id = 0 WHERE milestone_id = ?", m.ID); err != nil {
+ return err
+ }
+ return committer.Commit()
+}
+
+func updateRepoMilestoneNum(ctx context.Context, repoID int64) error {
+ _, err := db.GetEngine(ctx).Exec("UPDATE `repository` SET num_milestones=(SELECT count(*) FROM milestone WHERE repo_id=?),num_closed_milestones=(SELECT count(*) FROM milestone WHERE repo_id=? AND is_closed=?) WHERE id=?",
+ repoID,
+ repoID,
+ true,
+ repoID,
+ )
+ return err
+}
+
+// LoadTotalTrackedTime loads the total tracked time for the milestone.
+func (m *Milestone) LoadTotalTrackedTime(ctx context.Context) error {
+ type totalTimesByMilestone struct {
+ MilestoneID int64
+ Time int64
+ }
+ totalTime := &totalTimesByMilestone{MilestoneID: m.ID}
+ has, err := db.GetEngine(ctx).Table("issue").
+ Join("INNER", "milestone", "issue.milestone_id = milestone.id").
+ Join("LEFT", "tracked_time", "tracked_time.issue_id = issue.id").
+ Where("tracked_time.deleted = ?", false).
+ Select("milestone_id, sum(time) as time").
+ Where("milestone_id = ?", m.ID).
+ GroupBy("milestone_id").
+ Get(totalTime)
+ if err != nil {
+ return err
+ } else if !has {
+ return nil
+ }
+ m.TotalTrackedTime = totalTime.Time
+ return nil
+}
+
+// InsertMilestones creates milestones of repository.
+func InsertMilestones(ctx context.Context, ms ...*Milestone) (err error) {
+ if len(ms) == 0 {
+ return nil
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ // insert one by one so each milestone gets its ID back; a batch insert would not return them
+ for _, m := range ms {
+ if _, err = sess.NoAutoTime().Insert(m); err != nil {
+ return err
+ }
+ }
+
+ if _, err = db.Exec(ctx, "UPDATE `repository` SET num_milestones = num_milestones + ? WHERE id = ?", len(ms), ms[0].RepoID); err != nil {
+ return err
+ }
+ return committer.Commit()
+}
diff --git a/models/issues/milestone_list.go b/models/issues/milestone_list.go
new file mode 100644
index 0000000..955ab23
--- /dev/null
+++ b/models/issues/milestone_list.go
@@ -0,0 +1,195 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/optional"
+
+ "xorm.io/builder"
+)
+
+// MilestoneList is a list of milestones offering additional functionality
+type MilestoneList []*Milestone
+
+func (milestones MilestoneList) getMilestoneIDs() []int64 {
+ ids := make([]int64, 0, len(milestones))
+ for _, ms := range milestones {
+ ids = append(ids, ms.ID)
+ }
+ return ids
+}
+
+// FindMilestoneOptions contains options for finding milestones
+type FindMilestoneOptions struct {
+ db.ListOptions
+ RepoID int64
+ IsClosed optional.Option[bool]
+ Name string
+ SortType string
+ RepoCond builder.Cond
+ RepoIDs []int64
+}
+
+func (opts FindMilestoneOptions) ToConds() builder.Cond {
+ cond := builder.NewCond()
+ if opts.RepoID != 0 {
+ cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
+ }
+ if opts.IsClosed.Has() {
+ cond = cond.And(builder.Eq{"is_closed": opts.IsClosed.Value()})
+ }
+ if opts.RepoCond != nil && opts.RepoCond.IsValid() {
+ cond = cond.And(builder.In("repo_id", builder.Select("id").From("repository").Where(opts.RepoCond)))
+ }
+ if len(opts.RepoIDs) > 0 {
+ cond = cond.And(builder.In("repo_id", opts.RepoIDs))
+ }
+ if len(opts.Name) != 0 {
+ cond = cond.And(db.BuildCaseInsensitiveLike("name", opts.Name))
+ }
+
+ return cond
+}
+
+func (opts FindMilestoneOptions) ToOrders() string {
+ switch opts.SortType {
+ case "furthestduedate":
+ return "deadline_unix DESC"
+ case "leastcomplete":
+ return "completeness ASC"
+ case "mostcomplete":
+ return "completeness DESC"
+ case "leastissues":
+ return "num_issues ASC"
+ case "mostissues":
+ return "num_issues DESC"
+ case "id":
+ return "id ASC"
+ case "name":
+ return "name DESC"
+ default:
+ return "deadline_unix ASC, name ASC"
+ }
+}
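+
+// Example (illustrative only, not part of the original file): listing the open
+// milestones of a repository sorted by least issues, using the generic db.Find
+// helper as the tests in this package do; ctx and repoID are assumed to be in scope.
+//
+//	milestones, err := db.Find[Milestone](ctx, FindMilestoneOptions{
+//		RepoID:   repoID,
+//		IsClosed: optional.Some(false),
+//		SortType: "leastissues",
+//	})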
+
+// GetMilestoneIDsByNames returns a list of milestone IDs for the given names.
+// It doesn't filter them by repository, so it may return milestones belonging to different repos.
+// It is used for filtering issues via the indexer; otherwise it would be of little use.
+// Since several milestones can share the same name, the returned list may contain more IDs than names.
+func GetMilestoneIDsByNames(ctx context.Context, names []string) ([]int64, error) {
+ var ids []int64
+ return ids, db.GetEngine(ctx).Table("milestone").
+ Where(db.BuildCaseInsensitiveIn("name", names)).
+ Cols("id").
+ Find(&ids)
+}
+
+// LoadTotalTrackedTimes loads the TotalTrackedTime for every milestone in the list with a single batch request
+func (milestones MilestoneList) LoadTotalTrackedTimes(ctx context.Context) error {
+ type totalTimesByMilestone struct {
+ MilestoneID int64
+ Time int64
+ }
+ if len(milestones) == 0 {
+ return nil
+ }
+ trackedTimes := make(map[int64]int64, len(milestones))
+
+ // Get total tracked time by milestone_id
+ rows, err := db.GetEngine(ctx).Table("issue").
+ Join("INNER", "milestone", "issue.milestone_id = milestone.id").
+ Join("LEFT", "tracked_time", "tracked_time.issue_id = issue.id").
+ Where("tracked_time.deleted = ?", false).
+ Select("milestone_id, sum(time) as time").
+ In("milestone_id", milestones.getMilestoneIDs()).
+ GroupBy("milestone_id").
+ Rows(new(totalTimesByMilestone))
+ if err != nil {
+ return err
+ }
+
+ defer rows.Close()
+
+ for rows.Next() {
+ var totalTime totalTimesByMilestone
+ err = rows.Scan(&totalTime)
+ if err != nil {
+ return err
+ }
+ trackedTimes[totalTime.MilestoneID] = totalTime.Time
+ }
+
+ for _, milestone := range milestones {
+ milestone.TotalTrackedTime = trackedTimes[milestone.ID]
+ }
+ return nil
+}
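+
+// Example (illustrative only, not part of the original file): batch-loading the
+// tracked times for a slice of milestones already fetched from the database.
+//
+//	if err := MilestoneList(milestones).LoadTotalTrackedTimes(ctx); err != nil {
+//		return err
+//	}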
+
+// CountMilestonesMap returns a map from repository ID to the number of milestones matching the given options.
+func CountMilestonesMap(ctx context.Context, opts FindMilestoneOptions) (map[int64]int64, error) {
+ sess := db.GetEngine(ctx).Where(opts.ToConds())
+
+ countsSlice := make([]*struct {
+ RepoID int64
+ Count int64
+ }, 0, 10)
+ if err := sess.GroupBy("repo_id").
+ Select("repo_id AS repo_id, COUNT(*) AS count").
+ Table("milestone").
+ Find(&countsSlice); err != nil {
+ return nil, err
+ }
+
+ countMap := make(map[int64]int64, len(countsSlice))
+ for _, c := range countsSlice {
+ countMap[c.RepoID] = c.Count
+ }
+ return countMap, nil
+}
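+
+// Example (illustrative only, not part of the original file): counting the open
+// milestones of several repositories in one query; ctx and the repo IDs are
+// assumed to be in scope.
+//
+//	openCounts, err := CountMilestonesMap(ctx, FindMilestoneOptions{
+//		RepoIDs:  []int64{repo1.ID, repo2.ID},
+//		IsClosed: optional.Some(false),
+//	})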
+
+// MilestonesStats represents milestone statistic information.
+type MilestonesStats struct {
+ OpenCount, ClosedCount int64
+}
+
+// Total returns the total counts of milestones
+func (m MilestonesStats) Total() int64 {
+ return m.OpenCount + m.ClosedCount
+}
+
+// GetMilestonesStatsByRepoCondAndKw returns milestone statistics for the dashboard, filtered by the given repository conditions and name keyword.
+func GetMilestonesStatsByRepoCondAndKw(ctx context.Context, repoCond builder.Cond, keyword string) (*MilestonesStats, error) {
+ var err error
+ stats := &MilestonesStats{}
+
+ sess := db.GetEngine(ctx).Where("is_closed = ?", false)
+ if len(keyword) > 0 {
+ sess = sess.And(builder.Like{"UPPER(name)", strings.ToUpper(keyword)})
+ }
+ if repoCond.IsValid() {
+ sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
+ }
+ stats.OpenCount, err = sess.Count(new(Milestone))
+ if err != nil {
+ return nil, err
+ }
+
+ sess = db.GetEngine(ctx).Where("is_closed = ?", true)
+ if len(keyword) > 0 {
+ sess = sess.And(builder.Like{"UPPER(name)", strings.ToUpper(keyword)})
+ }
+ if repoCond.IsValid() {
+ sess.And(builder.In("repo_id", builder.Select("id").From("repository").Where(repoCond)))
+ }
+ stats.ClosedCount, err = sess.Count(new(Milestone))
+ if err != nil {
+ return nil, err
+ }
+
+ return stats, nil
+}
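+
+// Example (illustrative only, not part of the original file): gathering dashboard
+// milestone statistics for an explicit set of repositories; ctx and repoIDs are
+// assumed to be in scope.
+//
+//	stats, err := GetMilestonesStatsByRepoCondAndKw(ctx, builder.In("id", repoIDs), "")
+//	if err != nil {
+//		return err
+//	}
+//	total := stats.Total()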
diff --git a/models/issues/milestone_test.go b/models/issues/milestone_test.go
new file mode 100644
index 0000000..314cba3
--- /dev/null
+++ b/models/issues/milestone_test.go
@@ -0,0 +1,371 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "sort"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMilestone_State(t *testing.T) {
+ assert.Equal(t, api.StateOpen, (&issues_model.Milestone{IsClosed: false}).State())
+ assert.Equal(t, api.StateClosed, (&issues_model.Milestone{IsClosed: true}).State())
+}
+
+func TestGetMilestoneByRepoID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ milestone, err := issues_model.GetMilestoneByRepoID(db.DefaultContext, 1, 1)
+ require.NoError(t, err)
+ assert.EqualValues(t, 1, milestone.ID)
+ assert.EqualValues(t, 1, milestone.RepoID)
+
+ _, err = issues_model.GetMilestoneByRepoID(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID)
+ assert.True(t, issues_model.IsErrMilestoneNotExist(err))
+}
+
+func TestGetMilestonesByRepoID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ test := func(repoID int64, state api.StateType) {
+ var isClosed optional.Option[bool]
+ switch state {
+ case api.StateClosed, api.StateOpen:
+ isClosed = optional.Some(state == api.StateClosed)
+ }
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: isClosed,
+ })
+ require.NoError(t, err)
+
+ var n int
+
+ switch state {
+ case api.StateClosed:
+ n = repo.NumClosedMilestones
+
+ case api.StateAll:
+ n = repo.NumMilestones
+
+ case api.StateOpen:
+ fallthrough
+
+ default:
+ n = repo.NumOpenMilestones
+ }
+
+ assert.Len(t, milestones, n)
+ for _, milestone := range milestones {
+ assert.EqualValues(t, repoID, milestone.RepoID)
+ }
+ }
+ test(1, api.StateOpen)
+ test(1, api.StateAll)
+ test(1, api.StateClosed)
+ test(2, api.StateOpen)
+ test(2, api.StateAll)
+ test(2, api.StateClosed)
+ test(3, api.StateOpen)
+ test(3, api.StateClosed)
+ test(3, api.StateAll)
+
+ milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: unittest.NonexistentID,
+ IsClosed: optional.Some(false),
+ })
+ require.NoError(t, err)
+ assert.Empty(t, milestones)
+}
+
+func TestGetMilestones(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ test := func(sortType string, sortCond func(*issues_model.Milestone) int) {
+ for _, page := range []int{0, 1} {
+ milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ SortType: sortType,
+ })
+ require.NoError(t, err)
+ assert.Len(t, milestones, repo.NumMilestones-repo.NumClosedMilestones)
+ values := make([]int, len(milestones))
+ for i, milestone := range milestones {
+ values[i] = sortCond(milestone)
+ }
+ assert.True(t, sort.IntsAreSorted(values))
+
+ milestones, err = db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ Name: "",
+ SortType: sortType,
+ })
+ require.NoError(t, err)
+ assert.Len(t, milestones, repo.NumClosedMilestones)
+ values = make([]int, len(milestones))
+ for i, milestone := range milestones {
+ values[i] = sortCond(milestone)
+ }
+ assert.True(t, sort.IntsAreSorted(values))
+ }
+ }
+ test("furthestduedate", func(milestone *issues_model.Milestone) int {
+ return -int(milestone.DeadlineUnix)
+ })
+ test("leastcomplete", func(milestone *issues_model.Milestone) int {
+ return milestone.Completeness
+ })
+ test("mostcomplete", func(milestone *issues_model.Milestone) int {
+ return -milestone.Completeness
+ })
+ test("leastissues", func(milestone *issues_model.Milestone) int {
+ return milestone.NumIssues
+ })
+ test("mostissues", func(milestone *issues_model.Milestone) int {
+ return -milestone.NumIssues
+ })
+ test("soonestduedate", func(milestone *issues_model.Milestone) int {
+ return int(milestone.DeadlineUnix)
+ })
+}
+
+func TestCountRepoMilestones(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ test := func(repoID int64) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ count, err := db.Count[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: repoID,
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, repo.NumMilestones, count)
+ }
+ test(1)
+ test(2)
+ test(3)
+
+ count, err := db.Count[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: unittest.NonexistentID,
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, 0, count)
+}
+
+func TestCountRepoClosedMilestones(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ test := func(repoID int64) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ count, err := db.Count[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: repoID,
+ IsClosed: optional.Some(true),
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, repo.NumClosedMilestones, count)
+ }
+ test(1)
+ test(2)
+ test(3)
+
+ count, err := db.Count[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoID: unittest.NonexistentID,
+ IsClosed: optional.Some(true),
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, 0, count)
+}
+
+func TestCountMilestonesByRepoIDs(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ milestonesCount := func(repoID int64) (int, int) {
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repoID})
+ return repo.NumOpenMilestones, repo.NumClosedMilestones
+ }
+ repo1OpenCount, repo1ClosedCount := milestonesCount(1)
+ repo2OpenCount, repo2ClosedCount := milestonesCount(2)
+
+ openCounts, err := issues_model.CountMilestonesMap(db.DefaultContext, issues_model.FindMilestoneOptions{
+ RepoIDs: []int64{1, 2},
+ IsClosed: optional.Some(false),
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, repo1OpenCount, openCounts[1])
+ assert.EqualValues(t, repo2OpenCount, openCounts[2])
+
+ closedCounts, err := issues_model.CountMilestonesMap(db.DefaultContext,
+ issues_model.FindMilestoneOptions{
+ RepoIDs: []int64{1, 2},
+ IsClosed: optional.Some(true),
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, repo1ClosedCount, closedCounts[1])
+ assert.EqualValues(t, repo2ClosedCount, closedCounts[2])
+}
+
+func TestGetMilestonesByRepoIDs(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo1 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ repo2 := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+ test := func(sortType string, sortCond func(*issues_model.Milestone) int) {
+ for _, page := range []int{0, 1} {
+ openMilestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoIDs: []int64{repo1.ID, repo2.ID},
+ IsClosed: optional.Some(false),
+ SortType: sortType,
+ })
+ require.NoError(t, err)
+ assert.Len(t, openMilestones, repo1.NumOpenMilestones+repo2.NumOpenMilestones)
+ values := make([]int, len(openMilestones))
+ for i, milestone := range openMilestones {
+ values[i] = sortCond(milestone)
+ }
+ assert.True(t, sort.IntsAreSorted(values))
+
+ closedMilestones, err := db.Find[issues_model.Milestone](db.DefaultContext,
+ issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoIDs: []int64{repo1.ID, repo2.ID},
+ IsClosed: optional.Some(true),
+ SortType: sortType,
+ })
+ require.NoError(t, err)
+ assert.Len(t, closedMilestones, repo1.NumClosedMilestones+repo2.NumClosedMilestones)
+ values = make([]int, len(closedMilestones))
+ for i, milestone := range closedMilestones {
+ values[i] = sortCond(milestone)
+ }
+ assert.True(t, sort.IntsAreSorted(values))
+ }
+ }
+ test("furthestduedate", func(milestone *issues_model.Milestone) int {
+ return -int(milestone.DeadlineUnix)
+ })
+ test("leastcomplete", func(milestone *issues_model.Milestone) int {
+ return milestone.Completeness
+ })
+ test("mostcomplete", func(milestone *issues_model.Milestone) int {
+ return -milestone.Completeness
+ })
+ test("leastissues", func(milestone *issues_model.Milestone) int {
+ return milestone.NumIssues
+ })
+ test("mostissues", func(milestone *issues_model.Milestone) int {
+ return -milestone.NumIssues
+ })
+ test("soonestduedate", func(milestone *issues_model.Milestone) int {
+ return int(milestone.DeadlineUnix)
+ })
+}
+
+func TestNewMilestone(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ milestone := &issues_model.Milestone{
+ RepoID: 1,
+ Name: "milestoneName",
+ Content: "milestoneContent",
+ }
+
+ require.NoError(t, issues_model.NewMilestone(db.DefaultContext, milestone))
+ unittest.AssertExistsAndLoadBean(t, milestone)
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{})
+}
+
+func TestChangeMilestoneStatus(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
+
+ require.NoError(t, issues_model.ChangeMilestoneStatus(db.DefaultContext, milestone, true))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}, "is_closed=1")
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{})
+
+ require.NoError(t, issues_model.ChangeMilestoneStatus(db.DefaultContext, milestone, false))
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1}, "is_closed=0")
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: milestone.RepoID}, &issues_model.Milestone{})
+}
+
+func TestDeleteMilestoneByRepoID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ require.NoError(t, issues_model.DeleteMilestoneByRepoID(db.DefaultContext, 1, 1))
+ unittest.AssertNotExistsBean(t, &issues_model.Milestone{ID: 1})
+ unittest.CheckConsistencyFor(t, &repo_model.Repository{ID: 1})
+
+ require.NoError(t, issues_model.DeleteMilestoneByRepoID(db.DefaultContext, unittest.NonexistentID, unittest.NonexistentID))
+}
+
+func TestUpdateMilestone(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ milestone := unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
+ milestone.Name = " newMilestoneName "
+ milestone.Content = "newMilestoneContent"
+ require.NoError(t, issues_model.UpdateMilestone(db.DefaultContext, milestone, milestone.IsClosed))
+ milestone = unittest.AssertExistsAndLoadBean(t, &issues_model.Milestone{ID: 1})
+ assert.EqualValues(t, "newMilestoneName", milestone.Name)
+ unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
+}
+
+func TestUpdateMilestoneCounters(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{MilestoneID: 1},
+ "is_closed=0")
+
+ issue.IsClosed = true
+ issue.ClosedUnix = timeutil.TimeStampNow()
+ _, err := db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue)
+ require.NoError(t, err)
+ require.NoError(t, issues_model.UpdateMilestoneCounters(db.DefaultContext, issue.MilestoneID))
+ unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
+
+ issue.IsClosed = false
+ issue.ClosedUnix = 0
+ _, err = db.GetEngine(db.DefaultContext).ID(issue.ID).Cols("is_closed", "closed_unix").Update(issue)
+ require.NoError(t, err)
+ require.NoError(t, issues_model.UpdateMilestoneCounters(db.DefaultContext, issue.MilestoneID))
+ unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
+}
+
+func TestMigrate_InsertMilestones(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ reponame := "repo1"
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame})
+ name := "milestonetest1"
+ ms := &issues_model.Milestone{
+ RepoID: repo.ID,
+ Name: name,
+ }
+ err := issues_model.InsertMilestones(db.DefaultContext, ms)
+ require.NoError(t, err)
+ unittest.AssertExistsAndLoadBean(t, ms)
+ repoModified := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: repo.ID})
+ assert.EqualValues(t, repo.NumMilestones+1, repoModified.NumMilestones)
+
+ unittest.CheckConsistencyFor(t, &issues_model.Milestone{})
+}
diff --git a/models/issues/pull.go b/models/issues/pull.go
new file mode 100644
index 0000000..45e2e19
--- /dev/null
+++ b/models/issues/pull.go
@@ -0,0 +1,1105 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ org_model "code.gitea.io/gitea/models/organization"
+ pull_model "code.gitea.io/gitea/models/pull"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrPullRequestNotExist represents a "PullRequestNotExist" kind of error.
+type ErrPullRequestNotExist struct {
+ ID int64
+ IssueID int64
+ HeadRepoID int64
+ BaseRepoID int64
+ HeadBranch string
+ BaseBranch string
+}
+
+// IsErrPullRequestNotExist checks if an error is an ErrPullRequestNotExist.
+func IsErrPullRequestNotExist(err error) bool {
+ _, ok := err.(ErrPullRequestNotExist)
+ return ok
+}
+
+func (err ErrPullRequestNotExist) Error() string {
+ return fmt.Sprintf("pull request does not exist [id: %d, issue_id: %d, head_repo_id: %d, base_repo_id: %d, head_branch: %s, base_branch: %s]",
+ err.ID, err.IssueID, err.HeadRepoID, err.BaseRepoID, err.HeadBranch, err.BaseBranch)
+}
+
+func (err ErrPullRequestNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrPullRequestAlreadyExists represents a "PullRequestAlreadyExists"-error
+type ErrPullRequestAlreadyExists struct {
+ ID int64
+ IssueID int64
+ HeadRepoID int64
+ BaseRepoID int64
+ HeadBranch string
+ BaseBranch string
+}
+
+// IsErrPullRequestAlreadyExists checks if an error is an ErrPullRequestAlreadyExists.
+func IsErrPullRequestAlreadyExists(err error) bool {
+ _, ok := err.(ErrPullRequestAlreadyExists)
+ return ok
+}
+
+// Error does pretty-printing :D
+func (err ErrPullRequestAlreadyExists) Error() string {
+ return fmt.Sprintf("pull request already exists for these targets [id: %d, issue_id: %d, head_repo_id: %d, base_repo_id: %d, head_branch: %s, base_branch: %s]",
+ err.ID, err.IssueID, err.HeadRepoID, err.BaseRepoID, err.HeadBranch, err.BaseBranch)
+}
+
+func (err ErrPullRequestAlreadyExists) Unwrap() error {
+ return util.ErrAlreadyExist
+}
+
+// ErrPullWasClosed is used when trying to close an already closed pull request
+type ErrPullWasClosed struct {
+ ID int64
+ Index int64
+}
+
+// IsErrPullWasClosed checks if an error is an ErrPullWasClosed.
+func IsErrPullWasClosed(err error) bool {
+ _, ok := err.(ErrPullWasClosed)
+ return ok
+}
+
+func (err ErrPullWasClosed) Error() string {
+ return fmt.Sprintf("Pull request [%d] %d was already closed", err.ID, err.Index)
+}
+
+// PullRequestType defines pull request type
+type PullRequestType int
+
+// Enumerate all the pull request types
+const (
+ PullRequestGitea PullRequestType = iota
+ PullRequestGit
+)
+
+// PullRequestStatus defines pull request status
+type PullRequestStatus int
+
+// Enumerate all the pull request statuses
+const (
+ PullRequestStatusConflict PullRequestStatus = iota
+ PullRequestStatusChecking
+ PullRequestStatusMergeable
+ PullRequestStatusManuallyMerged
+ PullRequestStatusError
+ PullRequestStatusEmpty
+ PullRequestStatusAncestor
+)
+
+func (status PullRequestStatus) String() string {
+ switch status {
+ case PullRequestStatusConflict:
+ return "CONFLICT"
+ case PullRequestStatusChecking:
+ return "CHECKING"
+ case PullRequestStatusMergeable:
+ return "MERGEABLE"
+ case PullRequestStatusManuallyMerged:
+ return "MANUALLY_MERGED"
+ case PullRequestStatusError:
+ return "ERROR"
+ case PullRequestStatusEmpty:
+ return "EMPTY"
+ case PullRequestStatusAncestor:
+ return "ANCESTOR"
+ default:
+ return strconv.Itoa(int(status))
+ }
+}
+
+// PullRequestFlow is the flow of a pull request
+type PullRequestFlow int
+
+const (
+ // PullRequestFlowGithub github flow from head branch to base branch
+ PullRequestFlowGithub PullRequestFlow = iota
+ // PullRequestFlowAGit AGit flow pull request; the head branch does not exist
+ PullRequestFlowAGit
+)
+
+// PullRequest represents relation between pull request and repositories.
+type PullRequest struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type PullRequestType
+ Status PullRequestStatus
+ ConflictedFiles []string `xorm:"TEXT JSON"`
+ CommitsAhead int
+ CommitsBehind int
+
+ ChangedProtectedFiles []string `xorm:"TEXT JSON"`
+
+ IssueID int64 `xorm:"INDEX"`
+ Issue *Issue `xorm:"-"`
+ Index int64
+ RequestedReviewers []*user_model.User `xorm:"-"`
+ RequestedReviewersTeams []*org_model.Team `xorm:"-"`
+ isRequestedReviewersLoaded bool `xorm:"-"`
+
+ HeadRepoID int64 `xorm:"INDEX"`
+ HeadRepo *repo_model.Repository `xorm:"-"`
+ BaseRepoID int64 `xorm:"INDEX"`
+ BaseRepo *repo_model.Repository `xorm:"-"`
+ HeadBranch string
+ HeadCommitID string `xorm:"-"`
+ BaseBranch string
+ MergeBase string `xorm:"VARCHAR(64)"`
+ AllowMaintainerEdit bool `xorm:"NOT NULL DEFAULT false"`
+
+ HasMerged bool `xorm:"INDEX"`
+ MergedCommitID string `xorm:"VARCHAR(64)"`
+ MergerID int64 `xorm:"INDEX"`
+ Merger *user_model.User `xorm:"-"`
+ MergedUnix timeutil.TimeStamp `xorm:"updated INDEX"`
+
+ isHeadRepoLoaded bool `xorm:"-"`
+
+ Flow PullRequestFlow `xorm:"NOT NULL DEFAULT 0"`
+}
+
+func init() {
+ db.RegisterModel(new(PullRequest))
+}
+
+// DeletePullsByBaseRepoID deletes all pull requests by the base repository ID
+func DeletePullsByBaseRepoID(ctx context.Context, repoID int64) error {
+ deleteCond := builder.Select("id").From("pull_request").Where(builder.Eq{"pull_request.base_repo_id": repoID})
+
+ // Delete scheduled auto merges
+ if _, err := db.GetEngine(ctx).In("pull_id", deleteCond).
+ Delete(&pull_model.AutoMerge{}); err != nil {
+ return err
+ }
+
+ // Delete review states
+ if _, err := db.GetEngine(ctx).In("pull_id", deleteCond).
+ Delete(&pull_model.ReviewState{}); err != nil {
+ return err
+ }
+
+ _, err := db.DeleteByBean(ctx, &PullRequest{BaseRepoID: repoID})
+ return err
+}
+
+func (pr *PullRequest) String() string {
+ if pr == nil {
+ return "<PullRequest nil>"
+ }
+
+ s := new(strings.Builder)
+ fmt.Fprintf(s, "<PullRequest [%d]", pr.ID)
+ if pr.BaseRepo != nil {
+ fmt.Fprintf(s, "%s#%d[%s...", pr.BaseRepo.FullName(), pr.Index, pr.BaseBranch)
+ } else {
+ fmt.Fprintf(s, "Repo[%d]#%d[%s...", pr.BaseRepoID, pr.Index, pr.BaseBranch)
+ }
+ if pr.HeadRepoID == pr.BaseRepoID {
+ fmt.Fprintf(s, "%s]", pr.HeadBranch)
+ } else if pr.HeadRepo != nil {
+ fmt.Fprintf(s, "%s:%s]", pr.HeadRepo.FullName(), pr.HeadBranch)
+ } else {
+ fmt.Fprintf(s, "Repo[%d]:%s]", pr.HeadRepoID, pr.HeadBranch)
+ }
+ s.WriteByte('>')
+ return s.String()
+}
+
+// MustHeadUserName returns the HeadRepo's owner name, or an empty string if it cannot be loaded
+func (pr *PullRequest) MustHeadUserName(ctx context.Context) string {
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ if !repo_model.IsErrRepoNotExist(err) {
+ log.Error("LoadHeadRepo: %v", err)
+ } else {
+ log.Warn("LoadHeadRepo %d but repository does not exist: %v", pr.HeadRepoID, err)
+ }
+ return ""
+ }
+ if pr.HeadRepo == nil {
+ return ""
+ }
+ return pr.HeadRepo.OwnerName
+}
+
+// LoadAttributes loads pull request attributes from database
+// Note: don't try to load the Issue here because it would end up in recursive querying.
+func (pr *PullRequest) LoadAttributes(ctx context.Context) (err error) {
+ if pr.HasMerged && pr.Merger == nil {
+ pr.Merger, err = user_model.GetUserByID(ctx, pr.MergerID)
+ if user_model.IsErrUserNotExist(err) {
+ pr.MergerID = user_model.GhostUserID
+ pr.Merger = user_model.NewGhostUser()
+ } else if err != nil {
+ return fmt.Errorf("getUserByID [%d]: %w", pr.MergerID, err)
+ }
+ }
+
+ return nil
+}
+
+// LoadHeadRepo loads the head repository; pr.HeadRepo remains nil if it does not exist,
+// so ErrRepoNotExist is never returned
+func (pr *PullRequest) LoadHeadRepo(ctx context.Context) (err error) {
+ if !pr.isHeadRepoLoaded && pr.HeadRepo == nil && pr.HeadRepoID > 0 {
+ if pr.HeadRepoID == pr.BaseRepoID {
+ if pr.BaseRepo != nil {
+ pr.HeadRepo = pr.BaseRepo
+ return nil
+ } else if pr.Issue != nil && pr.Issue.Repo != nil {
+ pr.HeadRepo = pr.Issue.Repo
+ return nil
+ }
+ }
+
+ pr.HeadRepo, err = repo_model.GetRepositoryByID(ctx, pr.HeadRepoID)
+ if err != nil && !repo_model.IsErrRepoNotExist(err) { // the head repo may have been deleted; that is not treated as an error here
+ return fmt.Errorf("pr[%d].LoadHeadRepo[%d]: %w", pr.ID, pr.HeadRepoID, err)
+ }
+ pr.isHeadRepoLoaded = true
+ }
+ return nil
+}
+
+// LoadRequestedReviewers loads the requested reviewers.
+func (pr *PullRequest) LoadRequestedReviewers(ctx context.Context) error {
+ if pr.isRequestedReviewersLoaded || len(pr.RequestedReviewers) > 0 {
+ return nil
+ }
+
+ reviews, err := GetReviewsByIssueID(ctx, pr.Issue.ID)
+ if err != nil {
+ return err
+ }
+ if err = reviews.LoadReviewers(ctx); err != nil {
+ return err
+ }
+ pr.isRequestedReviewersLoaded = true
+ for _, review := range reviews {
+ if review.ReviewerID != 0 {
+ pr.RequestedReviewers = append(pr.RequestedReviewers, review.Reviewer)
+ }
+ }
+
+ return nil
+}
+
+// LoadRequestedReviewersTeams loads the requested reviewers teams.
+func (pr *PullRequest) LoadRequestedReviewersTeams(ctx context.Context) error {
+ reviews, err := GetReviewsByIssueID(ctx, pr.Issue.ID)
+ if err != nil {
+ return err
+ }
+ if err = reviews.LoadReviewersTeams(ctx); err != nil {
+ return err
+ }
+
+ for _, review := range reviews {
+ if review.ReviewerTeamID != 0 {
+ pr.RequestedReviewersTeams = append(pr.RequestedReviewersTeams, review.ReviewerTeam)
+ }
+ }
+
+ return nil
+}
+
+// LoadBaseRepo loads the target repository. ErrRepoNotExist may be returned.
+func (pr *PullRequest) LoadBaseRepo(ctx context.Context) (err error) {
+ if pr.BaseRepo != nil {
+ return nil
+ }
+
+ if pr.HeadRepoID == pr.BaseRepoID && pr.HeadRepo != nil {
+ pr.BaseRepo = pr.HeadRepo
+ return nil
+ }
+
+ if pr.Issue != nil && pr.Issue.Repo != nil {
+ pr.BaseRepo = pr.Issue.Repo
+ return nil
+ }
+
+ pr.BaseRepo, err = repo_model.GetRepositoryByID(ctx, pr.BaseRepoID)
+ if err != nil {
+ return fmt.Errorf("pr[%d].LoadBaseRepo[%d]: %w", pr.ID, pr.BaseRepoID, err)
+ }
+ return nil
+}
+
+// LoadIssue loads issue information from database
+func (pr *PullRequest) LoadIssue(ctx context.Context) (err error) {
+ if pr.Issue != nil {
+ return nil
+ }
+
+ pr.Issue, err = GetIssueByID(ctx, pr.IssueID)
+ if err == nil {
+ pr.Issue.PullRequest = pr
+ }
+ return err
+}
+
+// ReviewCount represents a count of Reviews
+type ReviewCount struct {
+ IssueID int64
+ Type ReviewType
+ Count int64
+}
+
+// GetApprovalCounts returns the approval counts by type
+// FIXME: Only returns official counts due to double counting of non-official counts
+func (pr *PullRequest) GetApprovalCounts(ctx context.Context) ([]*ReviewCount, error) {
+ rCounts := make([]*ReviewCount, 0, 6)
+ sess := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID)
+ return rCounts, sess.Select("issue_id, type, count(id) as `count`").Where("official = ? AND dismissed = ?", true, false).GroupBy("issue_id, type").Table("review").Find(&rCounts)
+}
+
+// GetApprovers returns the approvers of the pull request
+func (pr *PullRequest) GetApprovers(ctx context.Context) string {
+ stringBuilder := strings.Builder{}
+ if err := pr.getReviewedByLines(ctx, &stringBuilder); err != nil {
+ log.Error("Unable to getReviewedByLines: Error: %v", err)
+ return ""
+ }
+
+ return stringBuilder.String()
+}
+
+func (pr *PullRequest) getReviewedByLines(ctx context.Context, writer io.Writer) error {
+ maxReviewers := setting.Repository.PullRequest.DefaultMergeMessageMaxApprovers
+
+ if maxReviewers == 0 {
+ return nil
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ // Note: This doesn't page as we only expect a very limited number of reviews
+ reviews, err := FindLatestReviews(ctx, FindReviewOptions{
+ Types: []ReviewType{ReviewTypeApprove},
+ IssueID: pr.IssueID,
+ OfficialOnly: setting.Repository.PullRequest.DefaultMergeMessageOfficialApproversOnly,
+ })
+ if err != nil {
+ log.Error("Unable to FindReviews for PR ID %d: %v", pr.ID, err)
+ return err
+ }
+
+ reviewersWritten := 0
+
+ for _, review := range reviews {
+ if maxReviewers > 0 && reviewersWritten > maxReviewers {
+ break
+ }
+
+ if err := review.LoadReviewer(ctx); err != nil && !user_model.IsErrUserNotExist(err) {
+ log.Error("Unable to LoadReviewer[%d] for PR ID %d : %v", review.ReviewerID, pr.ID, err)
+ return err
+ } else if review.Reviewer == nil {
+ continue
+ }
+ if _, err := writer.Write([]byte("Reviewed-by: ")); err != nil {
+ return err
+ }
+ if _, err := writer.Write([]byte(review.Reviewer.NewGitSig().String())); err != nil {
+ return err
+ }
+ if _, err := writer.Write([]byte{'\n'}); err != nil {
+ return err
+ }
+ reviewersWritten++
+ }
+ return committer.Commit()
+}
+
+// GetGitRefName returns git ref for hidden pull request branch
+func (pr *PullRequest) GetGitRefName() string {
+ return fmt.Sprintf("%s%d/head", git.PullPrefix, pr.Index)
+}
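+
+// Example (illustrative only, not part of the original file): for a pull request
+// with Index 3 this yields a ref of the form "<PullPrefix>3/head", typically
+// "refs/pull/3/head" if git.PullPrefix is "refs/pull/".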
+
+func (pr *PullRequest) GetGitHeadBranchRefName() string {
+ return fmt.Sprintf("%s%s", git.BranchPrefix, pr.HeadBranch)
+}
+
+// GetReviewCommentsCount returns the number of review comments made on the diff of a PR review (not including comments on commits or issues in a PR)
+func (pr *PullRequest) GetReviewCommentsCount(ctx context.Context) int {
+ opts := FindCommentsOptions{
+ Type: CommentTypeReview,
+ IssueID: pr.IssueID,
+ }
+ conds := opts.ToConds()
+
+ count, err := db.GetEngine(ctx).Where(conds).Count(new(Comment))
+ if err != nil {
+ return 0
+ }
+ return int(count)
+}
+
+// IsChecking returns true if this pull request is still checking conflict.
+func (pr *PullRequest) IsChecking() bool {
+ return pr.Status == PullRequestStatusChecking
+}
+
+// CanAutoMerge returns true if this pull request can be merged automatically.
+func (pr *PullRequest) CanAutoMerge() bool {
+ return pr.Status == PullRequestStatusMergeable
+}
+
+// IsEmpty returns true if this pull request is empty.
+func (pr *PullRequest) IsEmpty() bool {
+ return pr.Status == PullRequestStatusEmpty
+}
+
+// IsAncestor returns true if the Head Commit of this PR is an ancestor of the Base Commit
+func (pr *PullRequest) IsAncestor() bool {
+ return pr.Status == PullRequestStatusAncestor
+}
+
+// IsFromFork return true if this PR is from a fork.
+func (pr *PullRequest) IsFromFork() bool {
+ return pr.HeadRepoID != pr.BaseRepoID
+}
+
+// SetMerged sets a pull request to merged and closes the corresponding issue
+func (pr *PullRequest) SetMerged(ctx context.Context) (bool, error) {
+ if pr.HasMerged {
+ return false, fmt.Errorf("PullRequest[%d] already merged", pr.Index)
+ }
+ if pr.MergedCommitID == "" || pr.MergedUnix == 0 || pr.Merger == nil {
+ return false, fmt.Errorf("Unable to merge PullRequest[%d], some required fields are empty", pr.Index)
+ }
+
+ pr.HasMerged = true
+ sess := db.GetEngine(ctx)
+
+ if _, err := sess.Exec("UPDATE `issue` SET `repo_id` = `repo_id` WHERE `id` = ?", pr.IssueID); err != nil {
+ return false, err
+ }
+
+ if _, err := sess.Exec("UPDATE `pull_request` SET `issue_id` = `issue_id` WHERE `id` = ?", pr.ID); err != nil {
+ return false, err
+ }
+
+ pr.Issue = nil
+ if err := pr.LoadIssue(ctx); err != nil {
+ return false, err
+ }
+
+ if tmpPr, err := GetPullRequestByID(ctx, pr.ID); err != nil {
+ return false, err
+ } else if tmpPr.HasMerged {
+ if pr.Issue.IsClosed {
+ return false, nil
+ }
+ return false, fmt.Errorf("PullRequest[%d] already merged but it's associated issue [%d] is not closed", pr.Index, pr.IssueID)
+ } else if pr.Issue.IsClosed {
+ return false, fmt.Errorf("PullRequest[%d] already closed", pr.Index)
+ }
+
+ if err := pr.Issue.LoadRepo(ctx); err != nil {
+ return false, err
+ }
+
+ if err := pr.Issue.Repo.LoadOwner(ctx); err != nil {
+ return false, err
+ }
+
+ if _, err := changeIssueStatus(ctx, pr.Issue, pr.Merger, true, true); err != nil {
+ return false, fmt.Errorf("Issue.changeStatus: %w", err)
+ }
+
+ // reset the conflicted files as there cannot be any if we're merged
+ pr.ConflictedFiles = []string{}
+
+ // We need to save all of the data used to compute this merge as it may have already been changed by TestPatch. FIXME: need to set some state to prevent TestPatch from running whilst we are merging.
+ if _, err := sess.Where("id = ?", pr.ID).Cols("has_merged, status, merge_base, merged_commit_id, merger_id, merged_unix, conflicted_files").Update(pr); err != nil {
+ return false, fmt.Errorf("Failed to update pr[%d]: %w", pr.ID, err)
+ }
+
+ return true, nil
+}
+
+// NewPullRequest creates a new pull request with labels for a repository.
+func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *Issue, labelIDs []int64, uuids []string, pr *PullRequest) (err error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ idx, err := db.GetNextResourceIndex(ctx, "issue_index", repo.ID)
+ if err != nil {
+ return fmt.Errorf("generate pull request index failed: %w", err)
+ }
+
+ issue.Index = idx
+
+ if err = NewIssueWithIndex(ctx, issue.Poster, NewIssueOptions{
+ Repo: repo,
+ Issue: issue,
+ LabelIDs: labelIDs,
+ Attachments: uuids,
+ IsPull: true,
+ }); err != nil {
+ if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) || IsErrNewIssueInsert(err) {
+ return err
+ }
+ return fmt.Errorf("newIssue: %w", err)
+ }
+
+ pr.Index = issue.Index
+ pr.BaseRepo = repo
+ pr.IssueID = issue.ID
+ if err = db.Insert(ctx, pr); err != nil {
+ return fmt.Errorf("insert pull repo: %w", err)
+ }
+
+ if err = committer.Commit(); err != nil {
+ return fmt.Errorf("Commit: %w", err)
+ }
+
+ return nil
+}
+
+// GetUnmergedPullRequest returns a pull request that is open and has not been merged
+// by given head/base and repo/branch.
+func GetUnmergedPullRequest(ctx context.Context, headRepoID, baseRepoID int64, headBranch, baseBranch string, flow PullRequestFlow) (*PullRequest, error) {
+ pr := new(PullRequest)
+ has, err := db.GetEngine(ctx).
+ Where("head_repo_id=? AND head_branch=? AND base_repo_id=? AND base_branch=? AND has_merged=? AND flow = ? AND issue.is_closed=?",
+ headRepoID, headBranch, baseRepoID, baseBranch, false, flow, false).
+ Join("INNER", "issue", "issue.id=pull_request.issue_id").
+ Get(pr)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrPullRequestNotExist{0, 0, headRepoID, baseRepoID, headBranch, baseBranch}
+ }
+
+ return pr, nil
+}
+
+// GetLatestPullRequestByHeadInfo returns the latest pull request (regardless of its status)
+// by given head information (repo and branch).
+func GetLatestPullRequestByHeadInfo(ctx context.Context, repoID int64, branch string) (*PullRequest, error) {
+ pr := new(PullRequest)
+ has, err := db.GetEngine(ctx).
+ Where("head_repo_id = ? AND head_branch = ? AND flow = ?", repoID, branch, PullRequestFlowGithub).
+ OrderBy("id DESC").
+ Get(pr)
+ if !has {
+ return nil, err
+ }
+ return pr, err
+}
+
+// GetPullRequestByIndex returns a pull request by the given index
+func GetPullRequestByIndex(ctx context.Context, repoID, index int64) (*PullRequest, error) {
+ if index < 1 {
+ return nil, ErrPullRequestNotExist{}
+ }
+ pr := &PullRequest{
+ BaseRepoID: repoID,
+ Index: index,
+ }
+
+ has, err := db.GetEngine(ctx).Get(pr)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrPullRequestNotExist{0, 0, 0, repoID, "", ""}
+ }
+
+ if err = pr.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+ if err = pr.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+
+ return pr, nil
+}
+
+// GetPullRequestByID returns a pull request by given ID.
+func GetPullRequestByID(ctx context.Context, id int64) (*PullRequest, error) {
+ pr := new(PullRequest)
+ has, err := db.GetEngine(ctx).ID(id).Get(pr)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrPullRequestNotExist{id, 0, 0, 0, "", ""}
+ }
+ return pr, pr.LoadAttributes(ctx)
+}
+
+// GetPullRequestByIssueIDWithNoAttributes returns pull request with no attributes loaded by given issue ID.
+func GetPullRequestByIssueIDWithNoAttributes(ctx context.Context, issueID int64) (*PullRequest, error) {
+ var pr PullRequest
+ has, err := db.GetEngine(ctx).Where("issue_id = ?", issueID).Get(&pr)
+ if err != nil {
+ return nil, err
+ }
+ if !has {
+ return nil, ErrPullRequestNotExist{0, issueID, 0, 0, "", ""}
+ }
+ return &pr, nil
+}
+
+// GetPullRequestByIssueID returns pull request by given issue ID.
+func GetPullRequestByIssueID(ctx context.Context, issueID int64) (*PullRequest, error) {
+ pr, exist, err := db.Get[PullRequest](ctx, builder.Eq{"issue_id": issueID})
+ if err != nil {
+ return nil, err
+ } else if !exist {
+ return nil, ErrPullRequestNotExist{0, issueID, 0, 0, "", ""}
+ }
+ return pr, pr.LoadAttributes(ctx)
+}
+
+// GetPullRequestByBaseHeadInfo returns the pull request by the given base and head
+func GetPullRequestByBaseHeadInfo(ctx context.Context, baseID, headID int64, base, head string) (*PullRequest, error) {
+ pr := &PullRequest{}
+ sess := db.GetEngine(ctx).
+ Join("INNER", "issue", "issue.id = pull_request.issue_id").
+ Where("base_repo_id = ? AND base_branch = ? AND head_repo_id = ? AND head_branch = ?", baseID, base, headID, head)
+ has, err := sess.Get(pr)
+ if err != nil {
+ return nil, err
+ }
+ if !has {
+ return nil, ErrPullRequestNotExist{
+ HeadRepoID: headID,
+ BaseRepoID: baseID,
+ HeadBranch: head,
+ BaseBranch: base,
+ }
+ }
+
+ if err = pr.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+ if err = pr.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+
+ return pr, nil
+}
+
+// GetAllUnmergedAgitPullRequestByPoster returns all unmerged AGit flow pull requests
+// created by the given poster.
+func GetAllUnmergedAgitPullRequestByPoster(ctx context.Context, uid int64) ([]*PullRequest, error) {
+ pulls := make([]*PullRequest, 0, 10)
+
+ err := db.GetEngine(ctx).
+ Where("has_merged=? AND flow = ? AND issue.is_closed=? AND issue.poster_id=?",
+ false, PullRequestFlowAGit, false, uid).
+ Join("INNER", "issue", "issue.id=pull_request.issue_id").
+ Find(&pulls)
+
+ return pulls, err
+}
+
+// Update updates all fields of pull request.
+func (pr *PullRequest) Update(ctx context.Context) error {
+ _, err := db.GetEngine(ctx).ID(pr.ID).AllCols().Update(pr)
+ return err
+}
+
+// UpdateCols updates specific fields of pull request.
+func (pr *PullRequest) UpdateCols(ctx context.Context, cols ...string) error {
+ _, err := db.GetEngine(ctx).ID(pr.ID).Cols(cols...).Update(pr)
+ return err
+}
+
+// UpdateColsIfNotMerged updates specific fields of a pull request if it has not been merged
+func (pr *PullRequest) UpdateColsIfNotMerged(ctx context.Context, cols ...string) error {
+ _, err := db.GetEngine(ctx).Where("id = ? AND has_merged = ?", pr.ID, false).Cols(cols...).Update(pr)
+ return err
+}
+
+// IsWorkInProgress determines if the pull request is a work in progress based on its title.
+// Issue must be set before this method can be called.
+func (pr *PullRequest) IsWorkInProgress(ctx context.Context) bool {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return false
+ }
+ return HasWorkInProgressPrefix(pr.Issue.Title)
+}
+
+// HasWorkInProgressPrefix determines if the given PR title has a Work In Progress prefix
+func HasWorkInProgressPrefix(title string) bool {
+ for _, prefix := range setting.Repository.PullRequest.WorkInProgressPrefixes {
+ if strings.HasPrefix(strings.ToUpper(title), strings.ToUpper(prefix)) {
+ return true
+ }
+ }
+ return false
+}
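+
+// Illustrative note (not part of the original change): the match is
+// case-insensitive, so with a configured prefix of "WIP:" the following hold:
+//
+//	HasWorkInProgressPrefix("WIP: add feature") // true
+//	HasWorkInProgressPrefix("wip: add feature") // true
+//	HasWorkInProgressPrefix("add feature")      // false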
+
+// IsFilesConflicted determines if the Pull Request has changes conflicting with the target branch.
+func (pr *PullRequest) IsFilesConflicted() bool {
+ return len(pr.ConflictedFiles) > 0
+}
+
+// GetWorkInProgressPrefix returns the prefix used to mark the pull request as a work in progress.
+// It returns an empty string when no prefix is found.
+func (pr *PullRequest) GetWorkInProgressPrefix(ctx context.Context) string {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("LoadIssue: %v", err)
+ return ""
+ }
+
+ for _, prefix := range setting.Repository.PullRequest.WorkInProgressPrefixes {
+ if strings.HasPrefix(strings.ToUpper(pr.Issue.Title), strings.ToUpper(prefix)) {
+ return pr.Issue.Title[0:len(prefix)]
+ }
+ }
+ return ""
+}
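+
+// Illustrative note (not part of the original change): the prefix is returned
+// as it appears in the title, not as configured. With "[WIP]" configured and a
+// title of "[wip] refactor", this returns "[wip]"; see
+// TestPullRequest_GetWorkInProgressPrefixWorkInProgress in pull_test.go.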
+
+// UpdateCommitDivergence updates the commit divergence (ahead/behind counts) of a pull request.
+func (pr *PullRequest) UpdateCommitDivergence(ctx context.Context, ahead, behind int) error {
+ if pr.ID == 0 {
+ return fmt.Errorf("pull ID is 0")
+ }
+ pr.CommitsAhead = ahead
+ pr.CommitsBehind = behind
+ _, err := db.GetEngine(ctx).ID(pr.ID).Cols("commits_ahead", "commits_behind").Update(pr)
+ return err
+}
+
+// IsSameRepo returns true if the base repo and the head repo are the same.
+func (pr *PullRequest) IsSameRepo() bool {
+ return pr.BaseRepoID == pr.HeadRepoID
+}
+
+// GetBaseBranchLink returns the relative URL of the base branch
+func (pr *PullRequest) GetBaseBranchLink(ctx context.Context) string {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("LoadBaseRepo: %v", err)
+ return ""
+ }
+ if pr.BaseRepo == nil {
+ return ""
+ }
+ return pr.BaseRepo.Link() + "/src/branch/" + util.PathEscapeSegments(pr.BaseBranch)
+}
+
+// GetHeadBranchLink returns the relative URL of the head branch
+func (pr *PullRequest) GetHeadBranchLink(ctx context.Context) string {
+ if pr.Flow == PullRequestFlowAGit {
+ return ""
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("LoadHeadRepo: %v", err)
+ return ""
+ }
+ if pr.HeadRepo == nil {
+ return ""
+ }
+ return pr.HeadRepo.Link() + "/src/branch/" + util.PathEscapeSegments(pr.HeadBranch)
+}
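+
+// Illustrative note (not part of the original change): both helpers return a
+// repository-relative link of the shape
+//
+//	<repo.Link()>/src/branch/<path-escaped branch name>
+//
+// e.g. a head branch named "my branch" ends up as ".../src/branch/my%20branch".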
+
+// UpdateAllowEdits updates whether the PR can be edited by maintainers.
+func UpdateAllowEdits(ctx context.Context, pr *PullRequest) error {
+ if _, err := db.GetEngine(ctx).ID(pr.ID).Cols("allow_maintainer_edit").Update(pr); err != nil {
+ return err
+ }
+ return nil
+}
+
+// Mergeable returns whether the pull request is mergeable.
+func (pr *PullRequest) Mergeable(ctx context.Context) bool {
+	// A pull request isn't mergeable if it:
+	// - is being conflict checked,
+	// - has a conflict,
+	// - received an error while being conflict checked, or
+	// - is a work-in-progress pull request.
+ return pr.Status != PullRequestStatusChecking && pr.Status != PullRequestStatusConflict &&
+ pr.Status != PullRequestStatusError && !pr.IsWorkInProgress(ctx)
+}
+
+// HasEnoughApprovals returns true if pr has enough granted approvals.
+func HasEnoughApprovals(ctx context.Context, protectBranch *git_model.ProtectedBranch, pr *PullRequest) bool {
+ if protectBranch.RequiredApprovals == 0 {
+ return true
+ }
+ return GetGrantedApprovalsCount(ctx, protectBranch, pr) >= protectBranch.RequiredApprovals
+}
+
+// GetGrantedApprovalsCount returns the number of granted approvals for pr. A granted approval must be authored by a user in an approval whitelist.
+func GetGrantedApprovalsCount(ctx context.Context, protectBranch *git_model.ProtectedBranch, pr *PullRequest) int64 {
+ sess := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
+ And("type = ?", ReviewTypeApprove).
+ And("official = ?", true).
+ And("dismissed = ?", false)
+ if protectBranch.IgnoreStaleApprovals {
+ sess = sess.And("stale = ?", false)
+ }
+ approvals, err := sess.Count(new(Review))
+ if err != nil {
+ log.Error("GetGrantedApprovalsCount: %v", err)
+ return 0
+ }
+
+ return approvals
+}
+
+// MergeBlockedByRejectedReview returns true if merge is blocked by rejected reviews
+func MergeBlockedByRejectedReview(ctx context.Context, protectBranch *git_model.ProtectedBranch, pr *PullRequest) bool {
+ if !protectBranch.BlockOnRejectedReviews {
+ return false
+ }
+ rejectExist, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
+ And("type = ?", ReviewTypeReject).
+ And("official = ?", true).
+ And("dismissed = ?", false).
+ Exist(new(Review))
+ if err != nil {
+ log.Error("MergeBlockedByRejectedReview: %v", err)
+ return true
+ }
+
+ return rejectExist
+}
+
+// MergeBlockedByOfficialReviewRequests returns true if merge is blocked by an
+// outstanding review request to an official reviewer.
+func MergeBlockedByOfficialReviewRequests(ctx context.Context, protectBranch *git_model.ProtectedBranch, pr *PullRequest) bool {
+ if !protectBranch.BlockOnOfficialReviewRequests {
+ return false
+ }
+ has, err := db.GetEngine(ctx).Where("issue_id = ?", pr.IssueID).
+ And("type = ?", ReviewTypeRequest).
+ And("official = ?", true).
+ Exist(new(Review))
+ if err != nil {
+ log.Error("MergeBlockedByOfficialReviewRequests: %v", err)
+ return true
+ }
+
+ return has
+}
+
+// MergeBlockedByOutdatedBranch returns true if merge is blocked by an outdated head branch
+func MergeBlockedByOutdatedBranch(protectBranch *git_model.ProtectedBranch, pr *PullRequest) bool {
+ return protectBranch.BlockOnOutdatedBranch && pr.CommitsBehind > 0
+}
+
+// GetCodeOwnersFromContent returns the code owners configuration.
+// It returns an empty slice if the file is missing and a list of warning
+// messages for parsing errors. Parsing is best-effort: invalid lines are
+// skipped, as are non-existent users and teams.
+func GetCodeOwnersFromContent(ctx context.Context, data string) ([]*CodeOwnerRule, []string) {
+ if len(data) == 0 {
+ return nil, nil
+ }
+
+ rules := make([]*CodeOwnerRule, 0)
+ lines := strings.Split(data, "\n")
+ warnings := make([]string, 0)
+
+ for i, line := range lines {
+ tokens := TokenizeCodeOwnersLine(line)
+ if len(tokens) == 0 {
+ continue
+ } else if len(tokens) < 2 {
+ warnings = append(warnings, fmt.Sprintf("Line: %d: incorrect format", i+1))
+ continue
+ }
+ rule, wr := ParseCodeOwnersLine(ctx, tokens)
+ for _, w := range wr {
+ warnings = append(warnings, fmt.Sprintf("Line: %d: %s", i+1, w))
+ }
+ if rule == nil {
+ continue
+ }
+
+ rules = append(rules, rule)
+ }
+
+ return rules, warnings
+}
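+
+// Illustrative sketch (not part of the original change): given CODEOWNERS
+// content such as (hypothetical users and teams)
+//
+//	# frontend
+//	web_src/.* @user1 @org1/team1
+//	docs/.*    @user2
+//
+// each non-comment line becomes one CodeOwnerRule whose pattern is anchored as
+// ^pattern$; unknown users or teams produce warnings instead of rules.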
+
+type CodeOwnerRule struct {
+ Rule *regexp.Regexp
+ Negative bool
+ Users []*user_model.User
+ Teams []*org_model.Team
+}
+
+func ParseCodeOwnersLine(ctx context.Context, tokens []string) (*CodeOwnerRule, []string) {
+ var err error
+ rule := &CodeOwnerRule{
+ Users: make([]*user_model.User, 0),
+ Teams: make([]*org_model.Team, 0),
+ Negative: strings.HasPrefix(tokens[0], "!"),
+ }
+
+ warnings := make([]string, 0)
+
+ rule.Rule, err = regexp.Compile(fmt.Sprintf("^%s$", strings.TrimPrefix(tokens[0], "!")))
+ if err != nil {
+ warnings = append(warnings, fmt.Sprintf("incorrect codeowner regexp: %s", err))
+ return nil, warnings
+ }
+
+ for _, user := range tokens[1:] {
+ user = strings.TrimPrefix(user, "@")
+
+ // Only @org/team can contain slashes
+ if strings.Contains(user, "/") {
+ s := strings.Split(user, "/")
+ if len(s) != 2 {
+ warnings = append(warnings, fmt.Sprintf("incorrect codeowner group: %s", user))
+ continue
+ }
+ orgName := s[0]
+ teamName := s[1]
+
+ org, err := org_model.GetOrgByName(ctx, orgName)
+ if err != nil {
+ warnings = append(warnings, fmt.Sprintf("incorrect codeowner organization: %s", user))
+ continue
+ }
+ teams, err := org.LoadTeams(ctx)
+ if err != nil {
+ warnings = append(warnings, fmt.Sprintf("incorrect codeowner team: %s", user))
+ continue
+ }
+
+ for _, team := range teams {
+ if team.Name == teamName {
+ rule.Teams = append(rule.Teams, team)
+ }
+ }
+ } else {
+ u, err := user_model.GetUserByName(ctx, user)
+ if err != nil {
+ warnings = append(warnings, fmt.Sprintf("incorrect codeowner user: %s", user))
+ continue
+ }
+ rule.Users = append(rule.Users, u)
+ }
+ }
+
+ if (len(rule.Users) == 0) && (len(rule.Teams) == 0) {
+ warnings = append(warnings, "no users/groups matched")
+ return nil, warnings
+ }
+
+ return rule, warnings
+}
+
+func TokenizeCodeOwnersLine(line string) []string {
+ if len(line) == 0 {
+ return nil
+ }
+
+ line = strings.TrimSpace(line)
+ line = strings.ReplaceAll(line, "\t", " ")
+
+ tokens := make([]string, 0)
+
+ escape := false
+ token := ""
+ for _, char := range line {
+ if escape {
+ token += string(char)
+ escape = false
+ } else if string(char) == "\\" {
+ escape = true
+ } else if string(char) == "#" {
+ break
+ } else if string(char) == " " {
+ if len(token) > 0 {
+ tokens = append(tokens, token)
+ token = ""
+ }
+ } else {
+ token += string(char)
+ }
+ }
+
+ if len(token) > 0 {
+ tokens = append(tokens, token)
+ }
+
+ return tokens
+}
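+
+// Illustrative sketch (not part of the original change): the tokenizer splits
+// on spaces, honours backslash escapes and stops at "#" comments, e.g.
+//
+//	TokenizeCodeOwnersLine(`.*\\.js @user2 #comment`)   // [".*\.js", "@user2"]
+//	TokenizeCodeOwnersLine(`path\ with\ spaces/ @org3`) // ["path with spaces/", "@org3"]
+//
+// (see TestParseCodeOwnersLine in pull_test.go for more cases).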
+
+// InsertPullRequests inserts the given pull requests and their issues.
+func InsertPullRequests(ctx context.Context, prs ...*PullRequest) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+ for _, pr := range prs {
+ if err := insertIssue(ctx, pr.Issue); err != nil {
+ return err
+ }
+ pr.IssueID = pr.Issue.ID
+ if _, err := sess.NoAutoTime().Insert(pr); err != nil {
+ return err
+ }
+ }
+ return committer.Commit()
+}
+
+// GetPullRequestByMergedCommit returns a merged pull request by the given commit
+func GetPullRequestByMergedCommit(ctx context.Context, repoID int64, sha string) (*PullRequest, error) {
+ pr := new(PullRequest)
+ has, err := db.GetEngine(ctx).Where("base_repo_id = ? AND merged_commit_id = ?", repoID, sha).Get(pr)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrPullRequestNotExist{0, 0, 0, repoID, "", ""}
+ }
+
+ if err = pr.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+ if err = pr.LoadIssue(ctx); err != nil {
+ return nil, err
+ }
+
+ return pr, nil
+}
diff --git a/models/issues/pull_list.go b/models/issues/pull_list.go
new file mode 100644
index 0000000..f3970fa
--- /dev/null
+++ b/models/issues/pull_list.go
@@ -0,0 +1,264 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/xorm"
+)
+
+// PullRequestsOptions holds the options for PRs
+type PullRequestsOptions struct {
+ db.ListOptions
+ State string
+ SortType string
+ Labels []int64
+ MilestoneID int64
+}
+
+func listPullRequestStatement(ctx context.Context, baseRepoID int64, opts *PullRequestsOptions) *xorm.Session {
+ sess := db.GetEngine(ctx).Where("pull_request.base_repo_id=?", baseRepoID)
+
+ sess.Join("INNER", "issue", "pull_request.issue_id = issue.id")
+ switch opts.State {
+ case "closed", "open":
+ sess.And("issue.is_closed=?", opts.State == "closed")
+ }
+
+ if len(opts.Labels) > 0 {
+ sess.Join("INNER", "issue_label", "issue.id = issue_label.issue_id").
+ In("issue_label.label_id", opts.Labels)
+ }
+
+ if opts.MilestoneID > 0 {
+ sess.And("issue.milestone_id=?", opts.MilestoneID)
+ }
+
+ return sess
+}
+
+func GetUnmergedPullRequestsByHeadInfoMax(ctx context.Context, repoID, olderThan int64, branch string) ([]*PullRequest, error) {
+ prs := make([]*PullRequest, 0, 2)
+ sess := db.GetEngine(ctx).
+ Join("INNER", "issue", "issue.id = `pull_request`.issue_id").
+ Where("`pull_request`.head_repo_id = ? AND `pull_request`.head_branch = ? AND `pull_request`.has_merged = ? AND `issue`.is_closed = ? AND `pull_request`.flow = ? AND (`issue`.`created` IS NULL OR `issue`.`created` <= ?)", repoID, branch, false, false, PullRequestFlowGithub, olderThan)
+ return prs, sess.Find(&prs)
+}
+
+// GetUnmergedPullRequestsByHeadInfo returns all pull requests that are open and have not been merged
+func GetUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) ([]*PullRequest, error) {
+ prs := make([]*PullRequest, 0, 2)
+ sess := db.GetEngine(ctx).
+ Join("INNER", "issue", "issue.id = pull_request.issue_id").
+ Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND issue.is_closed = ? AND flow = ?", repoID, branch, false, false, PullRequestFlowGithub)
+ return prs, sess.Find(&prs)
+}
+
+// CanMaintainerWriteToBranch checks whether the user is a maintainer and can write to the branch.
+func CanMaintainerWriteToBranch(ctx context.Context, p access_model.Permission, branch string, user *user_model.User) bool {
+ if p.CanWrite(unit.TypeCode) {
+ return true
+ }
+
+ if len(p.Units) < 1 {
+ return false
+ }
+
+ prs, err := GetUnmergedPullRequestsByHeadInfo(ctx, p.Units[0].RepoID, branch)
+ if err != nil {
+ return false
+ }
+
+ for _, pr := range prs {
+ if pr.AllowMaintainerEdit {
+ err = pr.LoadBaseRepo(ctx)
+ if err != nil {
+ continue
+ }
+ prPerm, err := access_model.GetUserRepoPermission(ctx, pr.BaseRepo, user)
+ if err != nil {
+ continue
+ }
+ if prPerm.CanWrite(unit.TypeCode) {
+ return true
+ }
+ }
+ }
+ return false
+}
+
+// HasUnmergedPullRequestsByHeadInfo checks if there are open, unmerged pull requests
+// for the given head information (repo and branch).
+func HasUnmergedPullRequestsByHeadInfo(ctx context.Context, repoID int64, branch string) (bool, error) {
+ return db.GetEngine(ctx).
+ Where("head_repo_id = ? AND head_branch = ? AND has_merged = ? AND issue.is_closed = ? AND flow = ?",
+ repoID, branch, false, false, PullRequestFlowGithub).
+ Join("INNER", "issue", "issue.id = pull_request.issue_id").
+ Exist(&PullRequest{})
+}
+
+// GetUnmergedPullRequestsByBaseInfo returns all pull requests that are open and have not been merged
+// for the given base information (repo and branch).
+func GetUnmergedPullRequestsByBaseInfo(ctx context.Context, repoID int64, branch string) ([]*PullRequest, error) {
+ prs := make([]*PullRequest, 0, 2)
+ return prs, db.GetEngine(ctx).
+ Where("base_repo_id=? AND base_branch=? AND has_merged=? AND issue.is_closed=?",
+ repoID, branch, false, false).
+ OrderBy("issue.updated_unix DESC").
+ Join("INNER", "issue", "issue.id=pull_request.issue_id").
+ Find(&prs)
+}
+
+// GetPullRequestIDsByCheckStatus returns the IDs of all pull requests with the given check status.
+func GetPullRequestIDsByCheckStatus(ctx context.Context, status PullRequestStatus) ([]int64, error) {
+ prs := make([]int64, 0, 10)
+ return prs, db.GetEngine(ctx).Table("pull_request").
+ Where("status=?", status).
+ Cols("pull_request.id").
+ Find(&prs)
+}
+
+// PullRequests returns all pull requests for a base Repo by the given conditions
+func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptions) (PullRequestList, int64, error) {
+ if opts.Page <= 0 {
+ opts.Page = 1
+ }
+
+ countSession := listPullRequestStatement(ctx, baseRepoID, opts)
+ maxResults, err := countSession.Count(new(PullRequest))
+ if err != nil {
+ log.Error("Count PRs: %v", err)
+ return nil, maxResults, err
+ }
+
+ findSession := listPullRequestStatement(ctx, baseRepoID, opts)
+ applySorts(findSession, opts.SortType, 0)
+ findSession = db.SetSessionPagination(findSession, opts)
+ prs := make([]*PullRequest, 0, opts.PageSize)
+ return prs, maxResults, findSession.Find(&prs)
+}
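+
+// Usage sketch (illustrative, not part of the original change): listing the
+// first page of open pull requests of a base repository, newest first:
+//
+//	prs, total, err := PullRequests(ctx, baseRepoID, &PullRequestsOptions{
+//		ListOptions: db.ListOptions{Page: 1},
+//		State:       "open",
+//		SortType:    "newest",
+//	})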
+
+// PullRequestList defines a list of pull requests
+type PullRequestList []*PullRequest
+
+func (prs PullRequestList) getRepositoryIDs() []int64 {
+ repoIDs := make(container.Set[int64])
+ for _, pr := range prs {
+ if pr.BaseRepo == nil && pr.BaseRepoID > 0 {
+ repoIDs.Add(pr.BaseRepoID)
+ }
+ if pr.HeadRepo == nil && pr.HeadRepoID > 0 {
+ repoIDs.Add(pr.HeadRepoID)
+ }
+ }
+ return repoIDs.Values()
+}
+
+func (prs PullRequestList) LoadRepositories(ctx context.Context) error {
+ repoIDs := prs.getRepositoryIDs()
+ reposMap := make(map[int64]*repo_model.Repository, len(repoIDs))
+ if err := db.GetEngine(ctx).
+ In("id", repoIDs).
+ Find(&reposMap); err != nil {
+ return fmt.Errorf("find repos: %w", err)
+ }
+ for _, pr := range prs {
+ if pr.BaseRepo == nil {
+ pr.BaseRepo = reposMap[pr.BaseRepoID]
+ }
+ if pr.HeadRepo == nil {
+ pr.HeadRepo = reposMap[pr.HeadRepoID]
+ pr.isHeadRepoLoaded = true
+ }
+ }
+ return nil
+}
+
+func (prs PullRequestList) LoadAttributes(ctx context.Context) error {
+ if _, err := prs.LoadIssues(ctx); err != nil {
+ return err
+ }
+ return nil
+}
+
+func (prs PullRequestList) LoadIssues(ctx context.Context) (IssueList, error) {
+ if len(prs) == 0 {
+ return nil, nil
+ }
+
+ // Load issues which are not loaded
+ issueIDs := container.FilterSlice(prs, func(pr *PullRequest) (int64, bool) {
+ return pr.IssueID, pr.Issue == nil && pr.IssueID > 0
+ })
+ issues := make(map[int64]*Issue, len(issueIDs))
+ if err := db.GetEngine(ctx).
+ In("id", issueIDs).
+ Find(&issues); err != nil {
+ return nil, fmt.Errorf("find issues: %w", err)
+ }
+
+ issueList := make(IssueList, 0, len(prs))
+ for _, pr := range prs {
+ if pr.Issue == nil {
+ pr.Issue = issues[pr.IssueID]
+ /*
+ Old code:
+ pr.Issue.PullRequest = pr // panic here means issueIDs and prs are not in sync
+
+			Panicking would be acceptable because it is almost impossible to happen under normal use.
+			But in integration testing, an asynchronous task could read a database that has been reset.
+			So returning an error makes more sense, letting the caller choose whether to ignore it.
+ */
+ if pr.Issue == nil {
+ return nil, fmt.Errorf("issues and prs may be not in sync: cannot find issue %v for pr %v: %w", pr.IssueID, pr.ID, util.ErrNotExist)
+ }
+ }
+ pr.Issue.PullRequest = pr
+ if pr.Issue.Repo == nil {
+ pr.Issue.Repo = pr.BaseRepo
+ }
+ issueList = append(issueList, pr.Issue)
+ }
+ return issueList, nil
+}
+
+// GetIssueIDs returns all issue ids
+func (prs PullRequestList) GetIssueIDs() []int64 {
+ return container.FilterSlice(prs, func(pr *PullRequest) (int64, bool) {
+ return pr.IssueID, pr.IssueID > 0
+ })
+}
+
+// HasMergedPullRequestInRepo returns whether the user (as poster) has a merged pull request in the repo.
+func HasMergedPullRequestInRepo(ctx context.Context, repoID, posterID int64) (bool, error) {
+ return db.GetEngine(ctx).
+ Join("INNER", "pull_request", "pull_request.issue_id = issue.id").
+ Where("repo_id=?", repoID).
+ And("poster_id=?", posterID).
+ And("is_pull=?", true).
+ And("pull_request.has_merged=?", true).
+ Select("issue.id").
+ Limit(1).
+ Get(new(Issue))
+}
+
+// GetPullRequestByIssueIDs returns all pull requests by issue ids
+func GetPullRequestByIssueIDs(ctx context.Context, issueIDs []int64) (PullRequestList, error) {
+ prs := make([]*PullRequest, 0, len(issueIDs))
+ return prs, db.GetEngine(ctx).
+ Where("issue_id > 0").
+ In("issue_id", issueIDs).
+ Find(&prs)
+}
diff --git a/models/issues/pull_test.go b/models/issues/pull_test.go
new file mode 100644
index 0000000..8e0c020
--- /dev/null
+++ b/models/issues/pull_test.go
@@ -0,0 +1,476 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/tests"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestPullRequest_LoadAttributes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadAttributes(db.DefaultContext))
+ assert.NotNil(t, pr.Merger)
+ assert.Equal(t, pr.MergerID, pr.Merger.ID)
+}
+
+func TestPullRequest_LoadIssue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadIssue(db.DefaultContext))
+ assert.NotNil(t, pr.Issue)
+ assert.Equal(t, int64(2), pr.Issue.ID)
+ require.NoError(t, pr.LoadIssue(db.DefaultContext))
+ assert.NotNil(t, pr.Issue)
+ assert.Equal(t, int64(2), pr.Issue.ID)
+}
+
+func TestPullRequest_LoadBaseRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
+ assert.NotNil(t, pr.BaseRepo)
+ assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
+ require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
+ assert.NotNil(t, pr.BaseRepo)
+ assert.Equal(t, pr.BaseRepoID, pr.BaseRepo.ID)
+}
+
+func TestPullRequest_LoadHeadRepo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pr.LoadHeadRepo(db.DefaultContext))
+ assert.NotNil(t, pr.HeadRepo)
+ assert.Equal(t, pr.HeadRepoID, pr.HeadRepo.ID)
+}
+
+// TODO TestMerge
+
+// TODO TestNewPullRequest
+
+func TestPullRequestsNewest(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ prs, count, err := issues_model.PullRequests(db.DefaultContext, 1, &issues_model.PullRequestsOptions{
+ ListOptions: db.ListOptions{
+ Page: 1,
+ },
+ State: "open",
+ SortType: "newest",
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, 3, count)
+ if assert.Len(t, prs, 3) {
+ assert.EqualValues(t, 5, prs[0].ID)
+ assert.EqualValues(t, 2, prs[1].ID)
+ assert.EqualValues(t, 1, prs[2].ID)
+ }
+}
+
+func TestLoadRequestedReviewers(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pull.LoadIssue(db.DefaultContext))
+ issue := pull.Issue
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+ assert.Empty(t, pull.RequestedReviewers)
+
+ user1, err := user_model.GetUserByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ comment, err := issues_model.AddReviewRequest(db.DefaultContext, issue, user1, &user_model.User{})
+ require.NoError(t, err)
+ assert.NotNil(t, comment)
+
+ require.NoError(t, pull.LoadRequestedReviewers(db.DefaultContext))
+ assert.Len(t, pull.RequestedReviewers, 1)
+
+ comment, err = issues_model.RemoveReviewRequest(db.DefaultContext, issue, user1, &user_model.User{})
+ require.NoError(t, err)
+ assert.NotNil(t, comment)
+
+ pull.RequestedReviewers = nil
+ require.NoError(t, pull.LoadRequestedReviewers(db.DefaultContext))
+ assert.Empty(t, pull.RequestedReviewers)
+}
+
+func TestPullRequestsOldest(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ prs, count, err := issues_model.PullRequests(db.DefaultContext, 1, &issues_model.PullRequestsOptions{
+ ListOptions: db.ListOptions{
+ Page: 1,
+ },
+ State: "open",
+ SortType: "oldest",
+ })
+ require.NoError(t, err)
+ assert.EqualValues(t, 3, count)
+ if assert.Len(t, prs, 3) {
+ assert.EqualValues(t, 1, prs[0].ID)
+ assert.EqualValues(t, 2, prs[1].ID)
+ assert.EqualValues(t, 5, prs[2].ID)
+ }
+}
+
+func TestGetUnmergedPullRequest(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr, err := issues_model.GetUnmergedPullRequest(db.DefaultContext, 1, 1, "branch2", "master", issues_model.PullRequestFlowGithub)
+ require.NoError(t, err)
+ assert.Equal(t, int64(2), pr.ID)
+
+ _, err = issues_model.GetUnmergedPullRequest(db.DefaultContext, 1, 9223372036854775807, "branch1", "master", issues_model.PullRequestFlowGithub)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrPullRequestNotExist(err))
+}
+
+func TestHasUnmergedPullRequestsByHeadInfo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "branch2")
+ require.NoError(t, err)
+ assert.True(t, exist)
+
+ exist, err = issues_model.HasUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "not_exist_branch")
+ require.NoError(t, err)
+ assert.False(t, exist)
+}
+
+func TestGetUnmergedPullRequestsByHeadInfo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(db.DefaultContext, 1, "branch2")
+ require.NoError(t, err)
+ assert.Len(t, prs, 1)
+ for _, pr := range prs {
+ assert.Equal(t, int64(1), pr.HeadRepoID)
+ assert.Equal(t, "branch2", pr.HeadBranch)
+ }
+}
+
+func TestGetUnmergedPullRequestsByHeadInfoMax(t *testing.T) {
+ defer tests.AddFixtures("models/fixtures/TestGetUnmergedPullRequestsByHeadInfoMax/")()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ repoID := int64(1)
+ olderThan := int64(0)
+
+ // for NULL created field the olderThan condition is ignored
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, olderThan, "branch2")
+ require.NoError(t, err)
+ assert.Equal(t, int64(1), prs[0].HeadRepoID)
+
+ // test for when the created field is set
+ branch := "branchmax"
+ prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, olderThan, branch)
+ require.NoError(t, err)
+ assert.Empty(t, prs)
+ olderThan = time.Now().UnixNano()
+ require.NoError(t, err)
+ prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, olderThan, branch)
+ require.NoError(t, err)
+ assert.Len(t, prs, 1)
+ for _, pr := range prs {
+ assert.Equal(t, int64(1), pr.HeadRepoID)
+ assert.Equal(t, branch, pr.HeadBranch)
+ }
+ pr := prs[0]
+
+ for _, testCase := range []struct {
+ table string
+ field string
+ id int64
+ match any
+ nomatch any
+ }{
+ {
+ table: "issue",
+ field: "is_closed",
+ id: pr.IssueID,
+ match: false,
+ nomatch: true,
+ },
+ {
+ table: "pull_request",
+ field: "flow",
+ id: pr.ID,
+ match: issues_model.PullRequestFlowGithub,
+ nomatch: issues_model.PullRequestFlowAGit,
+ },
+ {
+ table: "pull_request",
+ field: "head_repo_id",
+ id: pr.ID,
+ match: pr.HeadRepoID,
+ nomatch: 0,
+ },
+ {
+ table: "pull_request",
+ field: "head_branch",
+ id: pr.ID,
+ match: pr.HeadBranch,
+ nomatch: "something else",
+ },
+ {
+ table: "pull_request",
+ field: "has_merged",
+ id: pr.ID,
+ match: false,
+ nomatch: true,
+ },
+ } {
+ t.Run(testCase.field, func(t *testing.T) {
+ update := fmt.Sprintf("UPDATE `%s` SET `%s` = ? WHERE `id` = ?", testCase.table, testCase.field)
+
+ // expect no match
+ _, err = db.GetEngine(db.DefaultContext).Exec(update, testCase.nomatch, testCase.id)
+ require.NoError(t, err)
+ prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, olderThan, branch)
+ require.NoError(t, err)
+ assert.Empty(t, prs)
+
+ // expect one match
+ _, err = db.GetEngine(db.DefaultContext).Exec(update, testCase.match, testCase.id)
+ require.NoError(t, err)
+ prs, err = issues_model.GetUnmergedPullRequestsByHeadInfoMax(db.DefaultContext, repoID, olderThan, branch)
+ require.NoError(t, err)
+ assert.Len(t, prs, 1)
+
+ // identical to the known PR
+ assert.Equal(t, pr.ID, prs[0].ID)
+ })
+ }
+}
+
+func TestGetUnmergedPullRequestsByBaseInfo(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(db.DefaultContext, 1, "master")
+ require.NoError(t, err)
+ assert.Len(t, prs, 1)
+ pr := prs[0]
+ assert.Equal(t, int64(2), pr.ID)
+ assert.Equal(t, int64(1), pr.BaseRepoID)
+ assert.Equal(t, "master", pr.BaseBranch)
+}
+
+func TestGetPullRequestByIndex(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr, err := issues_model.GetPullRequestByIndex(db.DefaultContext, 1, 2)
+ require.NoError(t, err)
+ assert.Equal(t, int64(1), pr.BaseRepoID)
+ assert.Equal(t, int64(2), pr.Index)
+
+ _, err = issues_model.GetPullRequestByIndex(db.DefaultContext, 9223372036854775807, 9223372036854775807)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrPullRequestNotExist(err))
+
+ _, err = issues_model.GetPullRequestByIndex(db.DefaultContext, 1, 0)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrPullRequestNotExist(err))
+}
+
+func TestGetPullRequestByID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr, err := issues_model.GetPullRequestByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.Equal(t, int64(1), pr.ID)
+ assert.Equal(t, int64(2), pr.IssueID)
+
+ _, err = issues_model.GetPullRequestByID(db.DefaultContext, 9223372036854775807)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrPullRequestNotExist(err))
+}
+
+func TestGetPullRequestByIssueID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr, err := issues_model.GetPullRequestByIssueID(db.DefaultContext, 2)
+ require.NoError(t, err)
+ assert.Equal(t, int64(2), pr.IssueID)
+
+ _, err = issues_model.GetPullRequestByIssueID(db.DefaultContext, 9223372036854775807)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrPullRequestNotExist(err))
+}
+
+func TestPullRequest_Update(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ pr.BaseBranch = "baseBranch"
+ pr.HeadBranch = "headBranch"
+ pr.Update(db.DefaultContext)
+
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: pr.ID})
+ assert.Equal(t, "baseBranch", pr.BaseBranch)
+ assert.Equal(t, "headBranch", pr.HeadBranch)
+ unittest.CheckConsistencyFor(t, pr)
+}
+
+func TestPullRequest_UpdateCols(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := &issues_model.PullRequest{
+ ID: 1,
+ BaseBranch: "baseBranch",
+ HeadBranch: "headBranch",
+ }
+ require.NoError(t, pr.UpdateCols(db.DefaultContext, "head_branch"))
+
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ assert.Equal(t, "master", pr.BaseBranch)
+ assert.Equal(t, "headBranch", pr.HeadBranch)
+ unittest.CheckConsistencyFor(t, pr)
+}
+
+func TestPullRequestList_LoadAttributes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ prs := []*issues_model.PullRequest{
+ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1}),
+ unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2}),
+ }
+ require.NoError(t, issues_model.PullRequestList(prs).LoadAttributes(db.DefaultContext))
+ for _, pr := range prs {
+ assert.NotNil(t, pr.Issue)
+ assert.Equal(t, pr.IssueID, pr.Issue.ID)
+ }
+
+ require.NoError(t, issues_model.PullRequestList([]*issues_model.PullRequest{}).LoadAttributes(db.DefaultContext))
+}
+
+// TODO TestAddTestPullRequestTask
+
+func TestPullRequest_IsWorkInProgress(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ pr.LoadIssue(db.DefaultContext)
+
+ assert.False(t, pr.IsWorkInProgress(db.DefaultContext))
+
+ pr.Issue.Title = "WIP: " + pr.Issue.Title
+ assert.True(t, pr.IsWorkInProgress(db.DefaultContext))
+
+ pr.Issue.Title = "[wip]: " + pr.Issue.Title
+ assert.True(t, pr.IsWorkInProgress(db.DefaultContext))
+}
+
+func TestPullRequest_GetWorkInProgressPrefixWorkInProgress(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ pr.LoadIssue(db.DefaultContext)
+
+ assert.Empty(t, pr.GetWorkInProgressPrefix(db.DefaultContext))
+
+ original := pr.Issue.Title
+ pr.Issue.Title = "WIP: " + original
+ assert.Equal(t, "WIP:", pr.GetWorkInProgressPrefix(db.DefaultContext))
+
+ pr.Issue.Title = "[wip] " + original
+ assert.Equal(t, "[wip]", pr.GetWorkInProgressPrefix(db.DefaultContext))
+}
+
+func TestDeleteOrphanedObjects(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ countBefore, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{})
+ require.NoError(t, err)
+
+ _, err = db.GetEngine(db.DefaultContext).Insert(&issues_model.PullRequest{IssueID: 1000}, &issues_model.PullRequest{IssueID: 1001}, &issues_model.PullRequest{IssueID: 1003})
+ require.NoError(t, err)
+
+ orphaned, err := db.CountOrphanedObjects(db.DefaultContext, "pull_request", "issue", "pull_request.issue_id=issue.id")
+ require.NoError(t, err)
+ assert.EqualValues(t, 3, orphaned)
+
+ err = db.DeleteOrphanedObjects(db.DefaultContext, "pull_request", "issue", "pull_request.issue_id=issue.id")
+ require.NoError(t, err)
+
+ countAfter, err := db.GetEngine(db.DefaultContext).Count(&issues_model.PullRequest{})
+ require.NoError(t, err)
+ assert.EqualValues(t, countBefore, countAfter)
+}
+
+func TestParseCodeOwnersLine(t *testing.T) {
+ type CodeOwnerTest struct {
+ Line string
+ Tokens []string
+ }
+
+ given := []CodeOwnerTest{
+ {Line: "", Tokens: nil},
+ {Line: "# comment", Tokens: []string{}},
+ {Line: "!.* @user1 @org1/team1", Tokens: []string{"!.*", "@user1", "@org1/team1"}},
+ {Line: `.*\\.js @user2 #comment`, Tokens: []string{`.*\.js`, "@user2"}},
+ {Line: `docs/(aws|google|azure)/[^/]*\\.(md|txt) @org3 @org2/team2`, Tokens: []string{`docs/(aws|google|azure)/[^/]*\.(md|txt)`, "@org3", "@org2/team2"}},
+ {Line: `\#path @org3`, Tokens: []string{`#path`, "@org3"}},
+ {Line: `path\ with\ spaces/ @org3`, Tokens: []string{`path with spaces/`, "@org3"}},
+ }
+
+ for _, g := range given {
+ tokens := issues_model.TokenizeCodeOwnersLine(g.Line)
+ assert.Equal(t, g.Tokens, tokens, "Codeowners tokenizer failed")
+ }
+}
+
+func TestGetApprovers(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 5})
+ // Official reviews are already deduplicated. Allow unofficial reviews
+ // to assert that there are no duplicated approvers.
+ setting.Repository.PullRequest.DefaultMergeMessageOfficialApproversOnly = false
+ approvers := pr.GetApprovers(db.DefaultContext)
+ expected := "Reviewed-by: User Five <user5@example.com>\nReviewed-by: Org Six <org6@example.com>\n"
+ assert.EqualValues(t, expected, approvers)
+}
+
+func TestGetPullRequestByMergedCommit(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ pr, err := issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 1, "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3")
+ require.NoError(t, err)
+ assert.EqualValues(t, 1, pr.ID)
+
+ _, err = issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 0, "1a8823cd1a9549fde083f992f6b9b87a7ab74fb3")
+ require.ErrorAs(t, err, &issues_model.ErrPullRequestNotExist{})
+ _, err = issues_model.GetPullRequestByMergedCommit(db.DefaultContext, 1, "")
+ require.ErrorAs(t, err, &issues_model.ErrPullRequestNotExist{})
+}
+
+func TestMigrate_InsertPullRequests(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ reponame := "repo1"
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{Name: reponame})
+ owner := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: repo.OwnerID})
+
+ i := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: "title1",
+ Content: "issuecontent1",
+ IsPull: true,
+ PosterID: owner.ID,
+ Poster: owner,
+ }
+
+ p := &issues_model.PullRequest{
+ Issue: i,
+ }
+
+ err := issues_model.InsertPullRequests(db.DefaultContext, p)
+ require.NoError(t, err)
+
+ _ = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{IssueID: i.ID})
+
+ unittest.CheckConsistencyFor(t, &issues_model.Issue{}, &issues_model.PullRequest{})
+}
diff --git a/models/issues/reaction.go b/models/issues/reaction.go
new file mode 100644
index 0000000..eb7faef
--- /dev/null
+++ b/models/issues/reaction.go
@@ -0,0 +1,373 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrForbiddenIssueReaction is used when an attempt is made to create a forbidden reaction.
+type ErrForbiddenIssueReaction struct {
+ Reaction string
+}
+
+// IsErrForbiddenIssueReaction checks if an error is a ErrForbiddenIssueReaction.
+func IsErrForbiddenIssueReaction(err error) bool {
+ _, ok := err.(ErrForbiddenIssueReaction)
+ return ok
+}
+
+func (err ErrForbiddenIssueReaction) Error() string {
+ return fmt.Sprintf("'%s' is not an allowed reaction", err.Reaction)
+}
+
+func (err ErrForbiddenIssueReaction) Unwrap() error {
+ return util.ErrPermissionDenied
+}
+
+// ErrReactionAlreadyExist is used when an attempt is made to create a reaction that already exists.
+type ErrReactionAlreadyExist struct {
+ Reaction string
+}
+
+// IsErrReactionAlreadyExist checks if an error is a ErrReactionAlreadyExist.
+func IsErrReactionAlreadyExist(err error) bool {
+ _, ok := err.(ErrReactionAlreadyExist)
+ return ok
+}
+
+func (err ErrReactionAlreadyExist) Error() string {
+ return fmt.Sprintf("reaction '%s' already exists", err.Reaction)
+}
+
+func (err ErrReactionAlreadyExist) Unwrap() error {
+ return util.ErrAlreadyExist
+}
+
+// Reaction represents a reaction on issues and comments.
+type Reaction struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type string `xorm:"INDEX UNIQUE(s) NOT NULL"`
+ IssueID int64 `xorm:"INDEX UNIQUE(s) NOT NULL"`
+ CommentID int64 `xorm:"INDEX UNIQUE(s)"`
+ UserID int64 `xorm:"INDEX UNIQUE(s) NOT NULL"`
+ OriginalAuthorID int64 `xorm:"INDEX UNIQUE(s) NOT NULL DEFAULT(0)"`
+ OriginalAuthor string `xorm:"INDEX UNIQUE(s)"`
+ User *user_model.User `xorm:"-"`
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+}
+
+// LoadUser loads the user of the reaction.
+func (r *Reaction) LoadUser(ctx context.Context) (*user_model.User, error) {
+ if r.User != nil {
+ return r.User, nil
+ }
+ user, err := user_model.GetUserByID(ctx, r.UserID)
+ if err != nil {
+ return nil, err
+ }
+ r.User = user
+ return user, nil
+}
+
+// RemapExternalUser ExternalUserRemappable interface
+func (r *Reaction) RemapExternalUser(externalName string, externalID, userID int64) error {
+ r.OriginalAuthor = externalName
+ r.OriginalAuthorID = externalID
+ r.UserID = userID
+ return nil
+}
+
+// GetUserID ExternalUserRemappable interface
+func (r *Reaction) GetUserID() int64 { return r.UserID }
+
+// GetExternalName ExternalUserRemappable interface
+func (r *Reaction) GetExternalName() string { return r.OriginalAuthor }
+
+// GetExternalID ExternalUserRemappable interface
+func (r *Reaction) GetExternalID() int64 { return r.OriginalAuthorID }
+
+func init() {
+ db.RegisterModel(new(Reaction))
+}
+
+// FindReactionsOptions describes the conditions to Find reactions
+type FindReactionsOptions struct {
+ db.ListOptions
+ IssueID int64
+ CommentID int64
+ UserID int64
+ Reaction string
+}
+
+func (opts *FindReactionsOptions) toConds() builder.Cond {
+ // If Issue ID is set add to Query
+ cond := builder.NewCond()
+ if opts.IssueID > 0 {
+ cond = cond.And(builder.Eq{"reaction.issue_id": opts.IssueID})
+ }
+	// If CommentID is > 0, filter by that comment.
+	// If it is 0, the query does not filter on CommentID.
+	// If it is -1, explicitly search for issue reactions where CommentID = 0.
+ if opts.CommentID > 0 {
+ cond = cond.And(builder.Eq{"reaction.comment_id": opts.CommentID})
+ } else if opts.CommentID == -1 {
+ cond = cond.And(builder.Eq{"reaction.comment_id": 0})
+ }
+ if opts.UserID > 0 {
+ cond = cond.And(builder.Eq{
+ "reaction.user_id": opts.UserID,
+ "reaction.original_author_id": 0,
+ })
+ }
+ if opts.Reaction != "" {
+ cond = cond.And(builder.Eq{"reaction.type": opts.Reaction})
+ }
+
+ return cond
+}
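+
+// Illustrative note (not part of the original change): CommentID acts as a
+// tri-state filter when searching:
+//
+//	CommentID > 0   only reactions on that comment
+//	CommentID == 0  comment id is not filtered at all
+//	CommentID == -1 only issue-level reactions (comment_id = 0)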
+
+// FindCommentReactions returns a ReactionList of all reactions on a comment.
+func FindCommentReactions(ctx context.Context, issueID, commentID int64) (ReactionList, int64, error) {
+ return FindReactions(ctx, FindReactionsOptions{
+ IssueID: issueID,
+ CommentID: commentID,
+ })
+}
+
+// FindIssueReactions returns a ReactionList of all reactions from an issue
+func FindIssueReactions(ctx context.Context, issueID int64, listOptions db.ListOptions) (ReactionList, int64, error) {
+ return FindReactions(ctx, FindReactionsOptions{
+ ListOptions: listOptions,
+ IssueID: issueID,
+ CommentID: -1,
+ })
+}
+
+// FindReactions returns a ReactionList of all reactions from an issue or a comment
+func FindReactions(ctx context.Context, opts FindReactionsOptions) (ReactionList, int64, error) {
+ sess := db.GetEngine(ctx).
+ Where(opts.toConds()).
+ In("reaction.`type`", setting.UI.Reactions).
+ Asc("reaction.issue_id", "reaction.comment_id", "reaction.created_unix", "reaction.id")
+ if opts.Page != 0 {
+ sess = db.SetSessionPagination(sess, &opts)
+
+ reactions := make([]*Reaction, 0, opts.PageSize)
+ count, err := sess.FindAndCount(&reactions)
+ return reactions, count, err
+ }
+
+ reactions := make([]*Reaction, 0, 10)
+ count, err := sess.FindAndCount(&reactions)
+ return reactions, count, err
+}
+
+func createReaction(ctx context.Context, opts *ReactionOptions) (*Reaction, error) {
+ reaction := &Reaction{
+ Type: opts.Type,
+ UserID: opts.DoerID,
+ IssueID: opts.IssueID,
+ CommentID: opts.CommentID,
+ }
+ findOpts := FindReactionsOptions{
+ IssueID: opts.IssueID,
+ CommentID: opts.CommentID,
+ Reaction: opts.Type,
+ UserID: opts.DoerID,
+ }
+ if findOpts.CommentID == 0 {
+ // explicit search of Issue Reactions where CommentID = 0
+ findOpts.CommentID = -1
+ }
+
+ existingR, _, err := FindReactions(ctx, findOpts)
+ if err != nil {
+ return nil, err
+ }
+ if len(existingR) > 0 {
+ return existingR[0], ErrReactionAlreadyExist{Reaction: opts.Type}
+ }
+
+ if err := db.Insert(ctx, reaction); err != nil {
+ return nil, err
+ }
+
+ return reaction, nil
+}
+
+// ReactionOptions defines options for creating or deleting reactions
+type ReactionOptions struct {
+ Type string
+ DoerID int64
+ IssueID int64
+ CommentID int64
+}
+
+// CreateReaction creates a reaction for an issue or comment.
+func CreateReaction(ctx context.Context, opts *ReactionOptions) (*Reaction, error) {
+ if !setting.UI.ReactionsLookup.Contains(opts.Type) {
+ return nil, ErrForbiddenIssueReaction{opts.Type}
+ }
+
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ reaction, err := createReaction(ctx, opts)
+ if err != nil {
+ return reaction, err
+ }
+
+ if err := committer.Commit(); err != nil {
+ return nil, err
+ }
+ return reaction, nil
+}
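+
+// Usage sketch (illustrative, not part of the original change; doer and issue
+// are hypothetical): adding a "heart" reaction directly on an issue, where a
+// zero CommentID means the reaction targets the issue itself:
+//
+//	reaction, err := CreateReaction(ctx, &ReactionOptions{
+//		Type:    "heart",
+//		DoerID:  doer.ID,
+//		IssueID: issue.ID,
+//	})
+//
+// If the same user already reacted with the same type, the existing reaction
+// is returned together with ErrReactionAlreadyExist.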
+
+// DeleteReaction deletes a reaction for an issue or comment.
+func DeleteReaction(ctx context.Context, opts *ReactionOptions) error {
+ reaction := &Reaction{
+ Type: opts.Type,
+ UserID: opts.DoerID,
+ IssueID: opts.IssueID,
+ CommentID: opts.CommentID,
+ }
+
+ sess := db.GetEngine(ctx).Where("original_author_id = 0")
+ if opts.CommentID == -1 {
+ reaction.CommentID = 0
+ sess.MustCols("comment_id")
+ }
+
+ _, err := sess.Delete(reaction)
+ return err
+}
+
+// DeleteIssueReaction deletes a reaction on an issue.
+func DeleteIssueReaction(ctx context.Context, doerID, issueID int64, content string) error {
+ return DeleteReaction(ctx, &ReactionOptions{
+ Type: content,
+ DoerID: doerID,
+ IssueID: issueID,
+ CommentID: -1,
+ })
+}
+
+// DeleteCommentReaction deletes a reaction on a comment.
+func DeleteCommentReaction(ctx context.Context, doerID, issueID, commentID int64, content string) error {
+ return DeleteReaction(ctx, &ReactionOptions{
+ Type: content,
+ DoerID: doerID,
+ IssueID: issueID,
+ CommentID: commentID,
+ })
+}
+
+// ReactionList represents a list of reactions.
+type ReactionList []*Reaction
+
+// HasUser checks if the given user has reacted.
+func (list ReactionList) HasUser(userID int64) bool {
+ if userID == 0 {
+ return false
+ }
+ for _, reaction := range list {
+ if reaction.OriginalAuthor == "" && reaction.UserID == userID {
+ return true
+ }
+ }
+ return false
+}
+
+// GroupByType returns reactions grouped by type
+func (list ReactionList) GroupByType() map[string]ReactionList {
+ reactions := make(map[string]ReactionList)
+ for _, reaction := range list {
+ reactions[reaction.Type] = append(reactions[reaction.Type], reaction)
+ }
+ return reactions
+}
+
+func (list ReactionList) getUserIDs() []int64 {
+ return container.FilterSlice(list, func(reaction *Reaction) (int64, bool) {
+ if reaction.OriginalAuthor != "" {
+ return 0, false
+ }
+ return reaction.UserID, true
+ })
+}
+
+func valuesUser(m map[int64]*user_model.User) []*user_model.User {
+ values := make([]*user_model.User, 0, len(m))
+ for _, v := range m {
+ values = append(values, v)
+ }
+ return values
+}
+
+// LoadUsers loads the users of all reactions in the list.
+func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Repository) ([]*user_model.User, error) {
+ if len(list) == 0 {
+ return nil, nil
+ }
+
+ userIDs := list.getUserIDs()
+ userMaps := make(map[int64]*user_model.User, len(userIDs))
+ err := db.GetEngine(ctx).
+ In("id", userIDs).
+ Find(&userMaps)
+ if err != nil {
+ return nil, fmt.Errorf("find user: %w", err)
+ }
+
+ for _, reaction := range list {
+ if reaction.OriginalAuthor != "" {
+ reaction.User = user_model.NewReplaceUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name()))
+ } else if user, ok := userMaps[reaction.UserID]; ok {
+ reaction.User = user
+ } else {
+ reaction.User = user_model.NewGhostUser()
+ }
+ }
+ return valuesUser(userMaps), nil
+}
+
+// GetFirstUsers returns the display names of the first users who reacted, separated by commas.
+func (list ReactionList) GetFirstUsers() string {
+ var buffer bytes.Buffer
+ rem := setting.UI.ReactionMaxUserNum
+ for _, reaction := range list {
+ if buffer.Len() > 0 {
+ buffer.WriteString(", ")
+ }
+ buffer.WriteString(reaction.User.Name)
+ if rem--; rem == 0 {
+ break
+ }
+ }
+ return buffer.String()
+}
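+
+// Illustrative note (not part of the original change): with
+// setting.UI.ReactionMaxUserNum set to 2 and four reactions of one type, this
+// returns only the first two display names and GetMoreUserCount reports the
+// remaining 2, as exercised by TestIssueReactionCount in reaction_test.go.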
+
+// GetMoreUserCount returns the number of users not shown in the reaction tooltip.
+func (list ReactionList) GetMoreUserCount() int {
+ if len(list) <= setting.UI.ReactionMaxUserNum {
+ return 0
+ }
+ return len(list) - setting.UI.ReactionMaxUserNum
+}
diff --git a/models/issues/reaction_test.go b/models/issues/reaction_test.go
new file mode 100644
index 0000000..e02e6d7
--- /dev/null
+++ b/models/issues/reaction_test.go
@@ -0,0 +1,178 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func addReaction(t *testing.T, doerID, issueID, commentID int64, content string) {
+ var reaction *issues_model.Reaction
+ var err error
+ // NOTE: This does not perform user-blocking checks.
+ reaction, err = issues_model.CreateReaction(db.DefaultContext, &issues_model.ReactionOptions{
+ DoerID: doerID,
+ IssueID: issueID,
+ CommentID: commentID,
+ Type: content,
+ })
+
+ require.NoError(t, err)
+ assert.NotNil(t, reaction)
+}
+
+func TestIssueAddReaction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var issue1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, 0, "heart")
+
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID})
+}
+
+func TestIssueAddDuplicateReaction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var issue1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, 0, "heart")
+
+ reaction, err := issues_model.CreateReaction(db.DefaultContext, &issues_model.ReactionOptions{
+ DoerID: user1.ID,
+ IssueID: issue1ID,
+ Type: "heart",
+ })
+ require.Error(t, err)
+ assert.Equal(t, issues_model.ErrReactionAlreadyExist{Reaction: "heart"}, err)
+
+ existingR := unittest.AssertExistsAndLoadBean(t, &issues_model.Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID})
+ assert.Equal(t, existingR.ID, reaction.ID)
+}
+
+func TestIssueDeleteReaction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var issue1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, 0, "heart")
+
+ err := issues_model.DeleteIssueReaction(db.DefaultContext, user1.ID, issue1ID, "heart")
+ require.NoError(t, err)
+
+ unittest.AssertNotExistsBean(t, &issues_model.Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID})
+}
+
+func TestIssueReactionCount(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ setting.UI.ReactionMaxUserNum = 2
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+ ghost := user_model.NewGhostUser()
+
+ var issueID int64 = 2
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+
+ addReaction(t, user1.ID, issueID, 0, "heart")
+ addReaction(t, user2.ID, issueID, 0, "heart")
+ addReaction(t, org3.ID, issueID, 0, "heart")
+ addReaction(t, org3.ID, issueID, 0, "+1")
+ addReaction(t, user4.ID, issueID, 0, "+1")
+ addReaction(t, user4.ID, issueID, 0, "heart")
+ addReaction(t, ghost.ID, issueID, 0, "-1")
+
+ reactionsList, _, err := issues_model.FindReactions(db.DefaultContext, issues_model.FindReactionsOptions{
+ IssueID: issueID,
+ })
+ require.NoError(t, err)
+ assert.Len(t, reactionsList, 7)
+ _, err = reactionsList.LoadUsers(db.DefaultContext, repo)
+ require.NoError(t, err)
+
+ reactions := reactionsList.GroupByType()
+ assert.Len(t, reactions["heart"], 4)
+ assert.Equal(t, 2, reactions["heart"].GetMoreUserCount())
+ assert.Equal(t, user1.Name+", "+user2.Name, reactions["heart"].GetFirstUsers())
+ assert.True(t, reactions["heart"].HasUser(1))
+ assert.False(t, reactions["heart"].HasUser(5))
+ assert.False(t, reactions["heart"].HasUser(0))
+ assert.Len(t, reactions["+1"], 2)
+ assert.Equal(t, 0, reactions["+1"].GetMoreUserCount())
+ assert.Len(t, reactions["-1"], 1)
+}
+
+func TestIssueCommentAddReaction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var issue1ID int64 = 1
+ var comment1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, comment1ID, "heart")
+
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID, CommentID: comment1ID})
+}
+
+func TestIssueCommentDeleteReaction(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+
+ var issue1ID int64 = 1
+ var comment1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, comment1ID, "heart")
+ addReaction(t, user2.ID, issue1ID, comment1ID, "heart")
+ addReaction(t, org3.ID, issue1ID, comment1ID, "heart")
+ addReaction(t, user4.ID, issue1ID, comment1ID, "+1")
+
+ reactionsList, _, err := issues_model.FindReactions(db.DefaultContext, issues_model.FindReactionsOptions{
+ IssueID: issue1ID,
+ CommentID: comment1ID,
+ })
+ require.NoError(t, err)
+ assert.Len(t, reactionsList, 4)
+
+ reactions := reactionsList.GroupByType()
+ assert.Len(t, reactions["heart"], 3)
+ assert.Len(t, reactions["+1"], 1)
+}
+
+func TestIssueCommentReactionCount(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ var issue1ID int64 = 1
+ var comment1ID int64 = 1
+
+ addReaction(t, user1.ID, issue1ID, comment1ID, "heart")
+ require.NoError(t, issues_model.DeleteCommentReaction(db.DefaultContext, user1.ID, issue1ID, comment1ID, "heart"))
+
+ unittest.AssertNotExistsBean(t, &issues_model.Reaction{Type: "heart", UserID: user1.ID, IssueID: issue1ID, CommentID: comment1ID})
+}
diff --git a/models/issues/review.go b/models/issues/review.go
new file mode 100644
index 0000000..a39c120
--- /dev/null
+++ b/models/issues/review.go
@@ -0,0 +1,1056 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "slices"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+)
+
+// ErrReviewNotExist represents a "ReviewNotExist" kind of error.
+type ErrReviewNotExist struct {
+ ID int64
+}
+
+// IsErrReviewNotExist checks if an error is a ErrReviewNotExist.
+func IsErrReviewNotExist(err error) bool {
+ _, ok := err.(ErrReviewNotExist)
+ return ok
+}
+
+func (err ErrReviewNotExist) Error() string {
+ return fmt.Sprintf("review does not exist [id: %d]", err.ID)
+}
+
+func (err ErrReviewNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// ErrNotValidReviewRequest represents an invalid review request modification
+type ErrNotValidReviewRequest struct {
+ Reason string
+ UserID int64
+ RepoID int64
+}
+
+// IsErrNotValidReviewRequest checks if an error is a ErrNotValidReviewRequest.
+func IsErrNotValidReviewRequest(err error) bool {
+ _, ok := err.(ErrNotValidReviewRequest)
+ return ok
+}
+
+func (err ErrNotValidReviewRequest) Error() string {
+ return fmt.Sprintf("%s [user_id: %d, repo_id: %d]",
+ err.Reason,
+ err.UserID,
+ err.RepoID)
+}
+
+func (err ErrNotValidReviewRequest) Unwrap() error {
+ return util.ErrInvalidArgument
+}
+
+// ErrReviewRequestOnClosedPR represents an error when a user tries to request a re-review on a closed or merged PR.
+type ErrReviewRequestOnClosedPR struct{}
+
+// IsErrReviewRequestOnClosedPR checks if an error is an ErrReviewRequestOnClosedPR.
+func IsErrReviewRequestOnClosedPR(err error) bool {
+ _, ok := err.(ErrReviewRequestOnClosedPR)
+ return ok
+}
+
+func (err ErrReviewRequestOnClosedPR) Error() string {
+ return "cannot request a re-review on a closed or merged PR"
+}
+
+func (err ErrReviewRequestOnClosedPR) Unwrap() error {
+ return util.ErrPermissionDenied
+}
+
+// ReviewType defines the sort of feedback a review gives
+type ReviewType int
+
+// ReviewTypeUnknown unknown review type
+const ReviewTypeUnknown ReviewType = -1
+
+const (
+ // ReviewTypePending is a review which is not published yet
+ ReviewTypePending ReviewType = iota
+ // ReviewTypeApprove approves changes
+ ReviewTypeApprove
+ // ReviewTypeComment gives general feedback
+ ReviewTypeComment
+ // ReviewTypeReject gives feedback blocking merge
+ ReviewTypeReject
+ // ReviewTypeRequest request review from others
+ ReviewTypeRequest
+)
+
+// Icon returns the corresponding icon for the review type
+func (rt ReviewType) Icon() string {
+ switch rt {
+ case ReviewTypeApprove:
+ return "check"
+ case ReviewTypeReject:
+ return "diff"
+ case ReviewTypeComment:
+ return "comment"
+ case ReviewTypeRequest:
+ return "dot-fill"
+ default:
+ return "comment"
+ }
+}
+
+// Review represents a collection of code comments giving feedback for a PR
+type Review struct {
+ ID int64 `xorm:"pk autoincr"`
+ Type ReviewType
+ Reviewer *user_model.User `xorm:"-"`
+ ReviewerID int64 `xorm:"index"`
+ ReviewerTeamID int64 `xorm:"NOT NULL DEFAULT 0"`
+ ReviewerTeam *organization.Team `xorm:"-"`
+ OriginalAuthor string
+ OriginalAuthorID int64
+ Issue *Issue `xorm:"-"`
+ IssueID int64 `xorm:"index"`
+ Content string `xorm:"TEXT"`
+ // Official is a review made by an assigned approver (counts towards approval)
+ Official bool `xorm:"NOT NULL DEFAULT false"`
+ CommitID string `xorm:"VARCHAR(64)"`
+ Stale bool `xorm:"NOT NULL DEFAULT false"`
+ Dismissed bool `xorm:"NOT NULL DEFAULT false"`
+
+ CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
+ UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
+
+ // CodeComments are the initial code comments of the review
+ CodeComments CodeComments `xorm:"-"`
+
+ Comments []*Comment `xorm:"-"`
+}
+
+func init() {
+ db.RegisterModel(new(Review))
+}
+
+// LoadCodeComments loads CodeComments
+func (r *Review) LoadCodeComments(ctx context.Context) (err error) {
+ if r.CodeComments != nil {
+ return err
+ }
+ if err = r.LoadIssue(ctx); err != nil {
+ return err
+ }
+ r.CodeComments, err = fetchCodeCommentsByReview(ctx, r.Issue, nil, r, false)
+ return err
+}
+
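+// LoadIssue loads the issue the review belongs to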
+func (r *Review) LoadIssue(ctx context.Context) (err error) {
+ if r.Issue != nil {
+ return err
+ }
+ r.Issue, err = GetIssueByID(ctx, r.IssueID)
+ return err
+}
+
+// LoadReviewer loads reviewer
+func (r *Review) LoadReviewer(ctx context.Context) (err error) {
+ if r.ReviewerID == 0 || r.Reviewer != nil {
+ return err
+ }
+ r.Reviewer, err = user_model.GetPossibleUserByID(ctx, r.ReviewerID)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ return fmt.Errorf("GetPossibleUserByID [%d]: %w", r.ReviewerID, err)
+ }
+ r.ReviewerID = user_model.GhostUserID
+ r.Reviewer = user_model.NewGhostUser()
+ return nil
+ }
+ return err
+}
+
+// LoadReviewerTeam loads reviewer team
+func (r *Review) LoadReviewerTeam(ctx context.Context) (err error) {
+ if r.ReviewerTeamID == 0 || r.ReviewerTeam != nil {
+ return nil
+ }
+
+ r.ReviewerTeam, err = organization.GetTeamByID(ctx, r.ReviewerTeamID)
+ return err
+}
+
+// LoadAttributes loads the issue, code comments, reviewer and reviewer team of the review
+func (r *Review) LoadAttributes(ctx context.Context) (err error) {
+ if err = r.LoadIssue(ctx); err != nil {
+ return err
+ }
+ if err = r.LoadCodeComments(ctx); err != nil {
+ return err
+ }
+ if err = r.LoadReviewer(ctx); err != nil {
+ return err
+ }
+ if err = r.LoadReviewerTeam(ctx); err != nil {
+ return err
+ }
+ return err
+}
+
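+// HTMLTypeColorName returns the name of the color used to render this review type in the web UI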
+func (r *Review) HTMLTypeColorName() string {
+ switch r.Type {
+ case ReviewTypeApprove:
+ if r.Stale {
+ return "yellow"
+ }
+ return "green"
+ case ReviewTypeComment:
+ return "grey"
+ case ReviewTypeReject:
+ return "red"
+ case ReviewTypeRequest:
+ return "yellow"
+ }
+ return "grey"
+}
+
+// GetReviewByID returns the review by the given ID
+func GetReviewByID(ctx context.Context, id int64) (*Review, error) {
+ review := new(Review)
+ if has, err := db.GetEngine(ctx).ID(id).Get(review); err != nil {
+ return nil, err
+ } else if !has {
+ return nil, ErrReviewNotExist{ID: id}
+ }
+ return review, nil
+}
+
+// CreateReviewOptions represents the options to create a review. Type, Issue and Reviewer are required.
+type CreateReviewOptions struct {
+ Content string
+ Type ReviewType
+ Issue *Issue
+ Reviewer *user_model.User
+ ReviewerTeam *organization.Team
+ Official bool
+ CommitID string
+ Stale bool
+}
+
+// IsOfficialReviewer checks if the given reviewer can make official reviews on the issue (counts towards required approvals)
+func IsOfficialReviewer(ctx context.Context, issue *Issue, reviewer *user_model.User) (bool, error) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return false, err
+ }
+
+ pr := issue.PullRequest
+ rule, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+ if rule == nil {
+ // if no rule is found, then user with write access can make official reviews
+ err := pr.LoadBaseRepo(ctx)
+ if err != nil {
+ return false, err
+ }
+ writeAccess, err := access_model.HasAccessUnit(ctx, reviewer, pr.BaseRepo, unit.TypeCode, perm.AccessModeWrite)
+ if err != nil {
+ return false, err
+ }
+ return writeAccess, nil
+ }
+
+ official, err := git_model.IsUserOfficialReviewer(ctx, rule, reviewer)
+ if official || err != nil {
+ return official, err
+ }
+
+ return false, nil
+}
+
+// IsOfficialReviewerTeam checks if reviewers in this team can make official reviews on the issue (counts towards required approvals)
+func IsOfficialReviewerTeam(ctx context.Context, issue *Issue, team *organization.Team) (bool, error) {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return false, err
+ }
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, issue.PullRequest.BaseRepoID, issue.PullRequest.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+ if pb == nil {
+ return false, nil
+ }
+
+ if !pb.EnableApprovalsWhitelist {
+ return team.UnitAccessMode(ctx, unit.TypeCode) >= perm.AccessModeWrite, nil
+ }
+
+ return slices.Contains(pb.ApprovalsWhitelistTeamIDs, team.ID), nil
+}
+
+// CreateReview creates a new review based on opts
+func CreateReview(ctx context.Context, opts CreateReviewOptions) (*Review, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ review := &Review{
+ Issue: opts.Issue,
+ IssueID: opts.Issue.ID,
+ Reviewer: opts.Reviewer,
+ ReviewerTeam: opts.ReviewerTeam,
+ Content: opts.Content,
+ Official: opts.Official,
+ CommitID: opts.CommitID,
+ Stale: opts.Stale,
+ }
+
+ if opts.Reviewer != nil {
+ review.Type = opts.Type
+ review.ReviewerID = opts.Reviewer.ID
+
+ reviewCond := builder.Eq{"reviewer_id": opts.Reviewer.ID, "issue_id": opts.Issue.ID}
+ // make sure user review requests are cleared
+ if opts.Type != ReviewTypePending {
+ if _, err := sess.Where(reviewCond.And(builder.Eq{"type": ReviewTypeRequest})).Delete(new(Review)); err != nil {
+ return nil, err
+ }
+ }
+ // make sure that if the created review gets dismissed, no old review surfaces
+ // other types can be ignored, as they don't affect branch protection
+ if opts.Type == ReviewTypeApprove || opts.Type == ReviewTypeReject {
+ if _, err := sess.Where(reviewCond.And(builder.In("type", ReviewTypeApprove, ReviewTypeReject))).
+ Cols("dismissed").Update(&Review{Dismissed: true}); err != nil {
+ return nil, err
+ }
+ }
+ } else if opts.ReviewerTeam != nil {
+ review.Type = ReviewTypeRequest
+ review.ReviewerTeamID = opts.ReviewerTeam.ID
+ } else {
+ return nil, fmt.Errorf("provide either reviewer or reviewer team")
+ }
+
+ if _, err := sess.Insert(review); err != nil {
+ return nil, err
+ }
+ return review, committer.Commit()
+}
+
+// GetCurrentReview returns the current pending review by the reviewer for the given issue
+func GetCurrentReview(ctx context.Context, reviewer *user_model.User, issue *Issue) (*Review, error) {
+ if reviewer == nil {
+ return nil, nil
+ }
+ reviews, err := FindReviews(ctx, FindReviewOptions{
+ Types: []ReviewType{ReviewTypePending},
+ IssueID: issue.ID,
+ ReviewerID: reviewer.ID,
+ })
+ if err != nil {
+ return nil, err
+ }
+ if len(reviews) == 0 {
+ return nil, ErrReviewNotExist{}
+ }
+ reviews[0].Reviewer = reviewer
+ reviews[0].Issue = issue
+ return reviews[0], nil
+}
+
+// ReviewExists returns whether a review exists for a particular line of code in the PR
+func ReviewExists(ctx context.Context, issue *Issue, treePath string, line int64) (bool, error) {
+ return db.GetEngine(ctx).Cols("id").Exist(&Comment{IssueID: issue.ID, TreePath: treePath, Line: line, Type: CommentTypeCode})
+}
+
+// ContentEmptyErr represents an empty content error
+type ContentEmptyErr struct{}
+
+func (ContentEmptyErr) Error() string {
+ return "Review content is empty"
+}
+
+// IsContentEmptyErr returns true if err is a ContentEmptyErr
+func IsContentEmptyErr(err error) bool {
+ _, ok := err.(ContentEmptyErr)
+ return ok
+}
+
+// SubmitReview creates a review out of the existing pending review, or creates a new one if no pending review exists
+func SubmitReview(ctx context.Context, doer *user_model.User, issue *Issue, reviewType ReviewType, content, commitID string, stale bool, attachmentUUIDs []string) (*Review, *Comment, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, nil, err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ official := false
+
+ review, err := GetCurrentReview(ctx, doer, issue)
+ if err != nil {
+ if !IsErrReviewNotExist(err) {
+ return nil, nil, err
+ }
+
+ if reviewType != ReviewTypeApprove && len(strings.TrimSpace(content)) == 0 {
+ return nil, nil, ContentEmptyErr{}
+ }
+
+ if reviewType == ReviewTypeApprove || reviewType == ReviewTypeReject {
+ // Only the reviewer's latest review of type approve or reject counts as "official", so existing reviews need to be cleared
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_id=?", false, issue.ID, doer.ID); err != nil {
+ return nil, nil, err
+ }
+ if official, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ // No current review. Create a new one!
+ if review, err = CreateReview(ctx, CreateReviewOptions{
+ Type: reviewType,
+ Issue: issue,
+ Reviewer: doer,
+ Content: content,
+ Official: official,
+ CommitID: commitID,
+ Stale: stale,
+ }); err != nil {
+ return nil, nil, err
+ }
+ } else {
+ if err := review.LoadCodeComments(ctx); err != nil {
+ return nil, nil, err
+ }
+ if reviewType != ReviewTypeApprove && len(review.CodeComments) == 0 && len(strings.TrimSpace(content)) == 0 {
+ return nil, nil, ContentEmptyErr{}
+ }
+
+ if reviewType == ReviewTypeApprove || reviewType == ReviewTypeReject {
+ // Only the reviewer's latest review of type approve or reject counts as "official", so existing reviews need to be cleared
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_id=?", false, issue.ID, doer.ID); err != nil {
+ return nil, nil, err
+ }
+ if official, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ review.Official = official
+ review.Issue = issue
+ review.Content = content
+ review.Type = reviewType
+ review.CommitID = commitID
+ review.Stale = stale
+
+ if _, err := sess.ID(review.ID).Cols("content, type, official, commit_id, stale").Update(review); err != nil {
+ return nil, nil, err
+ }
+ }
+
+ comm, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReview,
+ Doer: doer,
+ Content: review.Content,
+ Issue: issue,
+ Repo: issue.Repo,
+ ReviewID: review.ID,
+ Attachments: attachmentUUIDs,
+ })
+ if err != nil || comm == nil {
+ return nil, nil, err
+ }
+
+ // try to remove the team review request if needed
+ if issue.Repo.Owner.IsOrganization() && (reviewType == ReviewTypeApprove || reviewType == ReviewTypeReject) {
+ teamReviewRequests := make([]*Review, 0, 10)
+ if err := sess.SQL("SELECT * FROM review WHERE issue_id = ? AND reviewer_team_id > 0 AND type = ?", issue.ID, ReviewTypeRequest).Find(&teamReviewRequests); err != nil {
+ return nil, nil, err
+ }
+
+ for _, teamReviewRequest := range teamReviewRequests {
+ ok, err := organization.IsTeamMember(ctx, issue.Repo.OwnerID, teamReviewRequest.ReviewerTeamID, doer.ID)
+ if err != nil {
+ return nil, nil, err
+ } else if !ok {
+ continue
+ }
+
+ if _, err := db.DeleteByID[Review](ctx, teamReviewRequest.ID); err != nil {
+ return nil, nil, err
+ }
+ }
+ }
+
+ comm.Review = review
+ return review, comm, committer.Commit()
+}
+
+// GetReviewByIssueIDAndUserID gets the latest review of the reviewer for a pull request
+func GetReviewByIssueIDAndUserID(ctx context.Context, issueID, userID int64) (*Review, error) {
+ review := new(Review)
+
+ has, err := db.GetEngine(ctx).Where(
+ builder.In("type", ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest).
+ And(builder.Eq{"issue_id": issueID, "reviewer_id": userID, "original_author_id": 0})).
+ Desc("id").
+ Get(review)
+ if err != nil {
+ return nil, err
+ }
+
+ if !has {
+ return nil, ErrReviewNotExist{}
+ }
+
+ return review, nil
+}
+
+// GetTeamReviewerByIssueIDAndTeamID gets the latest review request of the reviewer team for a pull request
+func GetTeamReviewerByIssueIDAndTeamID(ctx context.Context, issueID, teamID int64) (*Review, error) {
+ review := new(Review)
+
+ has, err := db.GetEngine(ctx).Where(builder.Eq{"issue_id": issueID, "reviewer_team_id": teamID}).
+ Desc("id").
+ Get(review)
+ if err != nil {
+ return nil, err
+ }
+
+ if !has {
+ return nil, ErrReviewNotExist{0}
+ }
+
+ return review, err
+}
+
+// MarkReviewsAsStale marks existing reviews as stale
+func MarkReviewsAsStale(ctx context.Context, issueID int64) (err error) {
+ _, err = db.GetEngine(ctx).Exec("UPDATE `review` SET stale=? WHERE issue_id=?", true, issueID)
+
+ return err
+}
+
+// MarkReviewsAsNotStale marks existing reviews as not stale for a given commit SHA
+func MarkReviewsAsNotStale(ctx context.Context, issueID int64, commitID string) (err error) {
+ _, err = db.GetEngine(ctx).Exec("UPDATE `review` SET stale=? WHERE issue_id=? AND commit_id=?", false, issueID, commitID)
+
+ return err
+}
+
+// DismissReview changes the dismiss status of a review
+func DismissReview(ctx context.Context, review *Review, isDismiss bool) (err error) {
+ if review.Dismissed == isDismiss || (review.Type != ReviewTypeApprove && review.Type != ReviewTypeReject) {
+ return nil
+ }
+
+ review.Dismissed = isDismiss
+
+ if review.ID == 0 {
+ return ErrReviewNotExist{}
+ }
+
+ _, err = db.GetEngine(ctx).ID(review.ID).Cols("dismissed").Update(review)
+
+ return err
+}
+
+// InsertReviews inserts reviews and their review comments
+func InsertReviews(ctx context.Context, reviews []*Review) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ for _, review := range reviews {
+ if _, err := sess.NoAutoTime().Insert(review); err != nil {
+ return err
+ }
+
+ if _, err := sess.NoAutoTime().Insert(&Comment{
+ Type: CommentTypeReview,
+ Content: review.Content,
+ PosterID: review.ReviewerID,
+ OriginalAuthor: review.OriginalAuthor,
+ OriginalAuthorID: review.OriginalAuthorID,
+ IssueID: review.IssueID,
+ ReviewID: review.ID,
+ CreatedUnix: review.CreatedUnix,
+ UpdatedUnix: review.UpdatedUnix,
+ }); err != nil {
+ return err
+ }
+
+ for _, c := range review.Comments {
+ c.ReviewID = review.ID
+ }
+
+ if len(review.Comments) > 0 {
+ if _, err := sess.NoAutoTime().Insert(review.Comments); err != nil {
+ return err
+ }
+ }
+ }
+
+ return committer.Commit()
+}
+
+// AddReviewRequest adds a review request from one reviewer
+func AddReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+ sess := db.GetEngine(ctx)
+
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ if review != nil {
+ // skip it when the reviewer has already been requested to review
+ if review.Type == ReviewTypeRequest {
+ return nil, committer.Commit() // still commit the transaction, or committer.Close() will rollback it, even if it's a reused transaction.
+ }
+
+ if issue.IsClosed {
+ return nil, ErrReviewRequestOnClosedPR{}
+ }
+
+ if issue.IsPull {
+ if err := issue.LoadPullRequest(ctx); err != nil {
+ return nil, err
+ }
+ if issue.PullRequest.HasMerged {
+ return nil, ErrReviewRequestOnClosedPR{}
+ }
+ }
+ }
+
+ // if the reviewer is an official reviewer,
+ // remove the official flag from all previous reviews
+ official, err := IsOfficialReviewer(ctx, issue, reviewer)
+ if err != nil {
+ return nil, err
+ } else if official {
+ if _, err := sess.Exec("UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_id=?", false, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
+ }
+
+ review, err = CreateReview(ctx, CreateReviewOptions{
+ Type: ReviewTypeRequest,
+ Issue: issue,
+ Reviewer: reviewer,
+ Official: official,
+ Stale: false,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: false, // Use RemovedAssignee as !isRequest
+ AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
+ ReviewID: review.ID,
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ // the caller can use the created comment to retrieve the created review too.
+ comment.Review = review
+
+ return comment, committer.Commit()
+}
+
+// RemoveReviewRequest removes a review request from one reviewer
+func RemoveReviewRequest(ctx context.Context, issue *Issue, reviewer, doer *user_model.User) (*Comment, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ if review == nil || review.Type != ReviewTypeRequest {
+ return nil, nil
+ }
+
+ if _, err = db.DeleteByBean(ctx, review); err != nil {
+ return nil, err
+ }
+
+ official, err := IsOfficialReviewer(ctx, issue, reviewer)
+ if err != nil {
+ return nil, err
+ } else if official {
+ if err := restoreLatestOfficialReview(ctx, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
+ }
+
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: true, // Use RemovedAssignee as !isRequest
+ AssigneeID: reviewer.ID, // Use AssigneeID as reviewer ID
+ })
+ if err != nil {
+ return nil, err
+ }
+
+ return comment, committer.Commit()
+}
+
+// restoreLatestOfficialReview marks the reviewer's latest review as official again
+func restoreLatestOfficialReview(ctx context.Context, issueID, reviewerID int64) error {
+ review, err := GetReviewByIssueIDAndUserID(ctx, issueID, reviewerID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return err
+ }
+
+ if review != nil {
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE id=?", true, review.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// AddTeamReviewRequest adds a review request from one team
+func AddTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ // This team already has been requested to review - therefore skip this.
+ if review != nil {
+ return nil, nil
+ }
+
+ official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
+ if err != nil {
+ return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
+ } else if !official {
+ if official, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
+ return nil, fmt.Errorf("isOfficialReviewer(): %w", err)
+ }
+ }
+
+ if review, err = CreateReview(ctx, CreateReviewOptions{
+ Type: ReviewTypeRequest,
+ Issue: issue,
+ ReviewerTeam: reviewer,
+ Official: official,
+ Stale: false,
+ }); err != nil {
+ return nil, err
+ }
+
+ if official {
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE issue_id=? AND reviewer_team_id=?", false, issue.ID, reviewer.ID); err != nil {
+ return nil, err
+ }
+ }
+
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: false, // Use RemovedAssignee as !isRequest
+ AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
+ ReviewID: review.ID,
+ })
+ if err != nil {
+ return nil, fmt.Errorf("CreateComment(): %w", err)
+ }
+
+ return comment, committer.Commit()
+}
+
+// RemoveTeamReviewRequest removes a review request from one team
+func RemoveTeamReviewRequest(ctx context.Context, issue *Issue, reviewer *organization.Team, doer *user_model.User) (*Comment, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ review, err := GetTeamReviewerByIssueIDAndTeamID(ctx, issue.ID, reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ if review == nil {
+ return nil, nil
+ }
+
+ if _, err = db.DeleteByBean(ctx, review); err != nil {
+ return nil, err
+ }
+
+ official, err := IsOfficialReviewerTeam(ctx, issue, reviewer)
+ if err != nil {
+ return nil, fmt.Errorf("isOfficialReviewerTeam(): %w", err)
+ }
+
+ if official {
+ // recalculate which is the latest official review from that team
+ review, err := GetReviewByIssueIDAndUserID(ctx, issue.ID, -reviewer.ID)
+ if err != nil && !IsErrReviewNotExist(err) {
+ return nil, err
+ }
+
+ if review != nil {
+ if _, err := db.Exec(ctx, "UPDATE `review` SET official=? WHERE id=?", true, review.ID); err != nil {
+ return nil, err
+ }
+ }
+ }
+
+ if doer == nil {
+ return nil, committer.Commit()
+ }
+
+ comment, err := CreateComment(ctx, &CreateCommentOptions{
+ Type: CommentTypeReviewRequest,
+ Doer: doer,
+ Repo: issue.Repo,
+ Issue: issue,
+ RemovedAssignee: true, // Use RemovedAssignee as !isRequest
+ AssigneeTeamID: reviewer.ID, // Use AssigneeTeamID as reviewer team ID
+ })
+ if err != nil {
+ return nil, fmt.Errorf("CreateComment(): %w", err)
+ }
+
+ return comment, committer.Commit()
+}
+
+// MarkConversation adds or removes the conversation (resolved) mark for a code comment
+func MarkConversation(ctx context.Context, comment *Comment, doer *user_model.User, isResolve bool) (err error) {
+ if comment.Type != CommentTypeCode {
+ return nil
+ }
+
+ if isResolve {
+ if comment.ResolveDoerID != 0 {
+ return nil
+ }
+
+ if _, err = db.GetEngine(ctx).Exec("UPDATE `comment` SET resolve_doer_id=? WHERE id=?", doer.ID, comment.ID); err != nil {
+ return err
+ }
+ } else {
+ if comment.ResolveDoerID == 0 {
+ return nil
+ }
+
+ if _, err = db.GetEngine(ctx).Exec("UPDATE `comment` SET resolve_doer_id=? WHERE id=?", 0, comment.ID); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// CanMarkConversation checks whether the doer is allowed to add or remove a conversation mark for a code comment:
+// the PR writer, an official reviewer, or the poster can do it
+func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.User) (permResult bool, err error) {
+ if doer == nil || issue == nil {
+ return false, fmt.Errorf("issue or doer is nil")
+ }
+
+ if doer.ID != issue.PosterID {
+ if err = issue.LoadRepo(ctx); err != nil {
+ return false, err
+ }
+
+ p, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
+ if err != nil {
+ return false, err
+ }
+
+ permResult = p.CanAccess(perm.AccessModeWrite, unit.TypePullRequests)
+ if !permResult {
+ if permResult, err = IsOfficialReviewer(ctx, issue, doer); err != nil {
+ return false, err
+ }
+ }
+
+ if !permResult {
+ return false, nil
+ }
+ }
+
+ return true, nil
+}
+
+// DeleteReview deletes a review and its code comments
+func DeleteReview(ctx context.Context, r *Review) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if r.ID == 0 {
+ return fmt.Errorf("review is not allowed to be 0")
+ }
+
+ if r.Type == ReviewTypeRequest {
+ return fmt.Errorf("review request can not be deleted using this method")
+ }
+
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
+
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
+
+ opts = FindCommentsOptions{
+ Type: CommentTypeReview,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
+
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
+
+ opts = FindCommentsOptions{
+ Type: CommentTypeDismissReview,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
+
+ if _, err := db.Delete[Comment](ctx, opts); err != nil {
+ return err
+ }
+
+ if _, err := db.DeleteByID[Review](ctx, r.ID); err != nil {
+ return err
+ }
+
+ if r.Official {
+ if err := restoreLatestOfficialReview(ctx, r.IssueID, r.ReviewerID); err != nil {
+ return err
+ }
+ }
+
+ return committer.Commit()
+}
+
+// GetCodeCommentsCount returns the number of CodeComments a Review has
+func (r *Review) GetCodeCommentsCount(ctx context.Context) int {
+ opts := FindCommentsOptions{
+ Type: CommentTypeCode,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
+ conds := opts.ToConds()
+ if r.ID == 0 {
+ conds = conds.And(builder.Eq{"invalidated": false})
+ }
+
+ count, err := db.GetEngine(ctx).Where(conds).Count(new(Comment))
+ if err != nil {
+ return 0
+ }
+ return int(count)
+}
+
+// HTMLURL returns the URL of the issue comment related to this review
+func (r *Review) HTMLURL(ctx context.Context) string {
+ opts := FindCommentsOptions{
+ Type: CommentTypeReview,
+ IssueID: r.IssueID,
+ ReviewID: r.ID,
+ }
+ comment := new(Comment)
+ has, err := db.GetEngine(ctx).Where(opts.ToConds()).Get(comment)
+ if err != nil || !has {
+ return ""
+ }
+ return comment.HTMLURL(ctx)
+}
+
+// RemapExternalUser ExternalUserRemappable interface
+func (r *Review) RemapExternalUser(externalName string, externalID, userID int64) error {
+ r.OriginalAuthor = externalName
+ r.OriginalAuthorID = externalID
+ r.ReviewerID = userID
+ return nil
+}
+
+// GetUserID ExternalUserRemappable interface
+func (r *Review) GetUserID() int64 { return r.ReviewerID }
+
+// GetExternalName ExternalUserRemappable interface
+func (r *Review) GetExternalName() string { return r.OriginalAuthor }
+
+// GetExternalID ExternalUserRemappable interface
+func (r *Review) GetExternalID() int64 { return r.OriginalAuthorID }
+
+// UpdateReviewsMigrationsByType updates the migration information of reviews for the given git service type, original author ID and poster ID
+func UpdateReviewsMigrationsByType(ctx context.Context, tp structs.GitServiceType, originalAuthorID string, posterID int64) error {
+ _, err := db.GetEngine(ctx).Table("review").
+ Where("original_author_id = ?", originalAuthorID).
+ And(migratedIssueCond(tp)).
+ Update(map[string]any{
+ "reviewer_id": posterID,
+ "original_author": "",
+ "original_author_id": 0,
+ })
+ return err
+}
diff --git a/models/issues/review_list.go b/models/issues/review_list.go
new file mode 100644
index 0000000..a5ceb21
--- /dev/null
+++ b/models/issues/review_list.go
@@ -0,0 +1,200 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ organization_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/optional"
+
+ "xorm.io/builder"
+)
+
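+// ReviewList is a list of reviews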
+type ReviewList []*Review
+
+// LoadReviewers loads reviewers
+func (reviews ReviewList) LoadReviewers(ctx context.Context) error {
+ reviewerIDs := make([]int64, len(reviews))
+ for i := 0; i < len(reviews); i++ {
+ reviewerIDs[i] = reviews[i].ReviewerID
+ }
+ reviewers, err := user_model.GetPossibleUserByIDs(ctx, reviewerIDs)
+ if err != nil {
+ return err
+ }
+
+ userMap := make(map[int64]*user_model.User, len(reviewers))
+ for _, reviewer := range reviewers {
+ userMap[reviewer.ID] = reviewer
+ }
+ for _, review := range reviews {
+ review.Reviewer = userMap[review.ReviewerID]
+ }
+ return nil
+}
+
+// LoadReviewersTeams loads the reviewers' teams
+func (reviews ReviewList) LoadReviewersTeams(ctx context.Context) error {
+ reviewersTeamsIDs := make([]int64, 0)
+ for _, review := range reviews {
+ if review.ReviewerTeamID != 0 {
+ reviewersTeamsIDs = append(reviewersTeamsIDs, review.ReviewerTeamID)
+ }
+ }
+
+ teamsMap := make(map[int64]*organization_model.Team, 0)
+ for _, teamID := range reviewersTeamsIDs {
+ team, err := organization_model.GetTeamByID(ctx, teamID)
+ if err != nil {
+ return err
+ }
+
+ teamsMap[teamID] = team
+ }
+
+ for _, review := range reviews {
+ if review.ReviewerTeamID != 0 {
+ review.ReviewerTeam = teamsMap[review.ReviewerTeamID]
+ }
+ }
+
+ return nil
+}
+
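+// LoadIssues loads the issues of the reviews, including their repositories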
+func (reviews ReviewList) LoadIssues(ctx context.Context) error {
+ issueIDs := container.FilterSlice(reviews, func(review *Review) (int64, bool) {
+ return review.IssueID, true
+ })
+
+ issues, err := GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ return err
+ }
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ return err
+ }
+ issueMap := make(map[int64]*Issue, len(issues))
+ for _, issue := range issues {
+ issueMap[issue.ID] = issue
+ }
+
+ for _, review := range reviews {
+ review.Issue = issueMap[review.IssueID]
+ }
+ return nil
+}
+
+// FindReviewOptions represents possible filters to find reviews
+type FindReviewOptions struct {
+ db.ListOptions
+ Types []ReviewType
+ IssueID int64
+ ReviewerID int64
+ OfficialOnly bool
+ Dismissed optional.Option[bool]
+}
+
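+// toCond converts the find options into an xorm query condition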
+func (opts *FindReviewOptions) toCond() builder.Cond {
+ cond := builder.NewCond()
+ if opts.IssueID > 0 {
+ cond = cond.And(builder.Eq{"issue_id": opts.IssueID})
+ }
+ if opts.ReviewerID > 0 {
+ cond = cond.And(builder.Eq{"reviewer_id": opts.ReviewerID})
+ }
+ if len(opts.Types) > 0 {
+ cond = cond.And(builder.In("type", opts.Types))
+ }
+ if opts.OfficialOnly {
+ cond = cond.And(builder.Eq{"official": true})
+ }
+ if opts.Dismissed.Has() {
+ cond = cond.And(builder.Eq{"dismissed": opts.Dismissed.Value()})
+ }
+ return cond
+}
+
+// FindReviews returns reviews passing FindReviewOptions
+func FindReviews(ctx context.Context, opts FindReviewOptions) (ReviewList, error) {
+ reviews := make([]*Review, 0, 10)
+ sess := db.GetEngine(ctx).Where(opts.toCond())
+ if opts.Page > 0 && !opts.IsListAll() {
+ sess = db.SetSessionPagination(sess, &opts)
+ }
+ return reviews, sess.
+ Asc("created_unix").
+ Asc("id").
+ Find(&reviews)
+}
+
+// FindLatestReviews returns only the latest review per reviewer matching FindReviewOptions
+func FindLatestReviews(ctx context.Context, opts FindReviewOptions) (ReviewList, error) {
+ reviews := make([]*Review, 0, 10)
+ cond := opts.toCond()
+ sess := db.GetEngine(ctx).Where(cond)
+ if opts.Page > 0 {
+ sess = db.SetSessionPagination(sess, &opts)
+ }
+
+ sess.In("id", builder.
+ Select("max(id)").
+ From("review").
+ Where(cond).
+ GroupBy("reviewer_id"))
+
+ return reviews, sess.
+ Asc("created_unix").
+ Asc("id").
+ Find(&reviews)
+}
+
+// CountReviews returns the count of reviews matching FindReviewOptions
+func CountReviews(ctx context.Context, opts FindReviewOptions) (int64, error) {
+ return db.GetEngine(ctx).Where(opts.toCond()).Count(&Review{})
+}
+
+// GetReviewersFromOriginalAuthorsByIssueID gets the latest review of each original author for a pull request
+func GetReviewersFromOriginalAuthorsByIssueID(ctx context.Context, issueID int64) (ReviewList, error) {
+ reviews := make([]*Review, 0, 10)
+
+ // Get latest review of each reviewer, sorted in order they were made
+ if err := db.GetEngine(ctx).SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND original_author_id <> 0 GROUP BY issue_id, original_author_id) ORDER BY review.updated_unix ASC",
+ issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest).
+ Find(&reviews); err != nil {
+ return nil, err
+ }
+
+ return reviews, nil
+}
+
+// GetReviewsByIssueID gets the latest review of each reviewer for a pull request
+func GetReviewsByIssueID(ctx context.Context, issueID int64) (ReviewList, error) {
+ reviews := make([]*Review, 0, 10)
+
+ sess := db.GetEngine(ctx)
+
+ // Get latest review of each reviewer, sorted in order they were made
+ if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id = 0 AND type in (?, ?, ?) AND dismissed = ? AND original_author_id = 0 GROUP BY issue_id, reviewer_id) ORDER BY review.updated_unix ASC",
+ issueID, ReviewTypeApprove, ReviewTypeReject, ReviewTypeRequest, false).
+ Find(&reviews); err != nil {
+ return nil, err
+ }
+
+ teamReviewRequests := make([]*Review, 0, 5)
+ if err := sess.SQL("SELECT * FROM review WHERE id IN (SELECT max(id) as id FROM review WHERE issue_id = ? AND reviewer_team_id <> 0 AND original_author_id = 0 GROUP BY issue_id, reviewer_team_id) ORDER BY review.updated_unix ASC",
+ issueID).
+ Find(&teamReviewRequests); err != nil {
+ return nil, err
+ }
+
+ if len(teamReviewRequests) > 0 {
+ reviews = append(reviews, teamReviewRequests...)
+ }
+
+ return reviews, nil
+}
diff --git a/models/issues/review_test.go b/models/issues/review_test.go
new file mode 100644
index 0000000..51cb940
--- /dev/null
+++ b/models/issues/review_test.go
@@ -0,0 +1,321 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetReviewByID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ review, err := issues_model.GetReviewByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.Equal(t, "Demo Review", review.Content)
+ assert.Equal(t, issues_model.ReviewTypeApprove, review.Type)
+
+ _, err = issues_model.GetReviewByID(db.DefaultContext, 23892)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrReviewNotExist(err), "IsErrReviewNotExist")
+}
+
+func TestReview_LoadAttributes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 1})
+ require.NoError(t, review.LoadAttributes(db.DefaultContext))
+ assert.NotNil(t, review.Issue)
+ assert.NotNil(t, review.Reviewer)
+
+ invalidReview1 := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 2})
+ require.Error(t, invalidReview1.LoadAttributes(db.DefaultContext))
+
+ invalidReview2 := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 3})
+ require.Error(t, invalidReview2.LoadAttributes(db.DefaultContext))
+}
+
+func TestReview_LoadCodeComments(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 4})
+ require.NoError(t, review.LoadAttributes(db.DefaultContext))
+ require.NoError(t, review.LoadCodeComments(db.DefaultContext))
+ assert.Len(t, review.CodeComments, 1)
+ assert.Equal(t, int64(4), review.CodeComments["README.md"][int64(4)][0].Line)
+}
+
+func TestReviewType_Icon(t *testing.T) {
+ assert.Equal(t, "check", issues_model.ReviewTypeApprove.Icon())
+ assert.Equal(t, "diff", issues_model.ReviewTypeReject.Icon())
+ assert.Equal(t, "comment", issues_model.ReviewTypeComment.Icon())
+ assert.Equal(t, "comment", issues_model.ReviewTypeUnknown.Icon())
+ assert.Equal(t, "dot-fill", issues_model.ReviewTypeRequest.Icon())
+ assert.Equal(t, "comment", issues_model.ReviewType(6).Icon())
+}
+
+func TestFindReviews(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ reviews, err := issues_model.FindReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ Types: []issues_model.ReviewType{issues_model.ReviewTypeApprove},
+ IssueID: 2,
+ ReviewerID: 1,
+ })
+ require.NoError(t, err)
+ assert.Len(t, reviews, 1)
+ assert.Equal(t, "Demo Review", reviews[0].Content)
+}
+
+func TestFindLatestReviews(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ reviews, err := issues_model.FindLatestReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ Types: []issues_model.ReviewType{issues_model.ReviewTypeApprove},
+ IssueID: 11,
+ })
+ require.NoError(t, err)
+ assert.Len(t, reviews, 2)
+ assert.Equal(t, "duplicate review from user5 (latest)", reviews[0].Content)
+ assert.Equal(t, "singular review from org6 and final review for this pr", reviews[1].Content)
+}
+
+func TestGetCurrentReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ review, err := issues_model.GetCurrentReview(db.DefaultContext, user, issue)
+ require.NoError(t, err)
+ assert.NotNil(t, review)
+ assert.Equal(t, issues_model.ReviewTypePending, review.Type)
+ assert.Equal(t, "Pending Review", review.Content)
+
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 7})
+ review2, err := issues_model.GetCurrentReview(db.DefaultContext, user2, issue)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrReviewNotExist(err))
+ assert.Nil(t, review2)
+}
+
+func TestCreateReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ review, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Content: "New Review",
+ Type: issues_model.ReviewTypePending,
+ Issue: issue,
+ Reviewer: user,
+ })
+ require.NoError(t, err)
+ assert.Equal(t, "New Review", review.Content)
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Review{Content: "New Review"})
+}
+
+func TestGetReviewersByIssueID(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 3})
+ user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ org3 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 3})
+ user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+
+ expectedReviews := []*issues_model.Review{}
+ expectedReviews = append(expectedReviews,
+ &issues_model.Review{
+ Reviewer: org3,
+ Type: issues_model.ReviewTypeReject,
+ UpdatedUnix: 946684812,
+ },
+ &issues_model.Review{
+ Reviewer: user4,
+ Type: issues_model.ReviewTypeApprove,
+ UpdatedUnix: 946684813,
+ },
+ &issues_model.Review{
+ Reviewer: user2,
+ Type: issues_model.ReviewTypeReject,
+ UpdatedUnix: 946684814,
+ })
+
+ allReviews, err := issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
+ require.NoError(t, err)
+ for _, review := range allReviews {
+ require.NoError(t, review.LoadReviewer(db.DefaultContext))
+ }
+ if assert.Len(t, allReviews, 3) {
+ for i, review := range allReviews {
+ assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
+ assert.Equal(t, expectedReviews[i].Type, review.Type)
+ assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
+ }
+ }
+
+ allReviews, err = issues_model.GetReviewsByIssueID(db.DefaultContext, issue.ID)
+ require.NoError(t, err)
+ require.NoError(t, allReviews.LoadReviewers(db.DefaultContext))
+ if assert.Len(t, allReviews, 3) {
+ for i, review := range allReviews {
+ assert.Equal(t, expectedReviews[i].Reviewer, review.Reviewer)
+ assert.Equal(t, expectedReviews[i].Type, review.Type)
+ assert.Equal(t, expectedReviews[i].UpdatedUnix, review.UpdatedUnix)
+ }
+ }
+}
+
+func TestDismissReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ rejectReviewExample := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ approveReviewExample := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 8})
+ assert.False(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, rejectReviewExample, true))
+ rejectReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ assert.True(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, requestReviewExample, true))
+ rejectReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ assert.True(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, requestReviewExample, true))
+ rejectReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ assert.True(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, requestReviewExample, false))
+ rejectReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ assert.True(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, requestReviewExample, false))
+ rejectReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 9})
+ requestReviewExample = unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: 11})
+ assert.True(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, rejectReviewExample, false))
+ assert.False(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.False(t, approveReviewExample.Dismissed)
+
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, approveReviewExample, true))
+ assert.False(t, rejectReviewExample.Dismissed)
+ assert.False(t, requestReviewExample.Dismissed)
+ assert.True(t, approveReviewExample.Dismissed)
+}
+
+func TestDeleteReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ review1, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Content: "Official rejection",
+ Type: issues_model.ReviewTypeReject,
+ Official: false,
+ Issue: issue,
+ Reviewer: user,
+ })
+ require.NoError(t, err)
+
+ review2, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Content: "Official approval",
+ Type: issues_model.ReviewTypeApprove,
+ Official: true,
+ Issue: issue,
+ Reviewer: user,
+ })
+ require.NoError(t, err)
+
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, review2))
+
+ _, err = issues_model.GetReviewByID(db.DefaultContext, review2.ID)
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrReviewNotExist(err), "IsErrReviewNotExist")
+
+ review1, err = issues_model.GetReviewByID(db.DefaultContext, review1.ID)
+ require.NoError(t, err)
+ assert.True(t, review1.Official)
+}
+
+func TestDeleteDismissedReview(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: 2})
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: issue.RepoID})
+ review, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Content: "reject",
+ Type: issues_model.ReviewTypeReject,
+ Official: false,
+ Issue: issue,
+ Reviewer: user,
+ })
+ require.NoError(t, err)
+ require.NoError(t, issues_model.DismissReview(db.DefaultContext, review, true))
+ comment, err := issues_model.CreateComment(db.DefaultContext, &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeDismissReview,
+ Doer: user,
+ Repo: repo,
+ Issue: issue,
+ ReviewID: review.ID,
+ Content: "dismiss",
+ })
+ require.NoError(t, err)
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: comment.ID})
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, review))
+ unittest.AssertNotExistsBean(t, &issues_model.Comment{ID: comment.ID})
+}
+
+func TestAddReviewRequest(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})
+ require.NoError(t, pull.LoadIssue(db.DefaultContext))
+ issue := pull.Issue
+ require.NoError(t, issue.LoadRepo(db.DefaultContext))
+ reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+ _, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
+ Issue: issue,
+ Reviewer: reviewer,
+ Type: issues_model.ReviewTypeReject,
+ })
+
+ require.NoError(t, err)
+ pull.HasMerged = false
+ require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
+ issue.IsClosed = true
+ _, err = issues_model.AddReviewRequest(db.DefaultContext, issue, reviewer, &user_model.User{})
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err))
+
+ pull.HasMerged = true
+ require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
+ issue.IsClosed = false
+ _, err = issues_model.AddReviewRequest(db.DefaultContext, issue, reviewer, &user_model.User{})
+ require.Error(t, err)
+ assert.True(t, issues_model.IsErrReviewRequestOnClosedPR(err))
+}
diff --git a/models/issues/stopwatch.go b/models/issues/stopwatch.go
new file mode 100644
index 0000000..fd9c7d7
--- /dev/null
+++ b/models/issues/stopwatch.go
@@ -0,0 +1,296 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// ErrIssueStopwatchNotExist represents an error when the stopwatch does not exist
+type ErrIssueStopwatchNotExist struct {
+ UserID int64
+ IssueID int64
+}
+
+func (err ErrIssueStopwatchNotExist) Error() string {
+ return fmt.Sprintf("issue stopwatch doesn't exist[uid: %d, issue_id: %d", err.UserID, err.IssueID)
+}
+
+func (err ErrIssueStopwatchNotExist) Unwrap() error {
+ return util.ErrNotExist
+}
+
+// Stopwatch represents a stopwatch for time tracking.
+type Stopwatch struct {
+ ID int64 `xorm:"pk autoincr"`
+ IssueID int64 `xorm:"INDEX"`
+ UserID int64 `xorm:"INDEX"`
+ CreatedUnix timeutil.TimeStamp `xorm:"created"`
+}
+
+func init() {
+ db.RegisterModel(new(Stopwatch))
+}
+
+// Seconds returns the amount of time passed since creation, based on local server time
+func (s Stopwatch) Seconds() int64 {
+ return int64(timeutil.TimeStampNow() - s.CreatedUnix)
+}
+
+// Duration returns a human-readable duration string based on local server time
+func (s Stopwatch) Duration() string {
+ return util.SecToTime(s.Seconds())
+}
+
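+// getStopwatch returns the stopwatch of the given user for the given issue, if it exists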
+func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, exists bool, err error) {
+ sw = new(Stopwatch)
+ exists, err = db.GetEngine(ctx).
+ Where("user_id = ?", userID).
+ And("issue_id = ?", issueID).
+ Get(sw)
+ return sw, exists, err
+}
+
+// UserStopwatch is a simple grouping of a user ID and that user's stopwatches
+type UserStopwatch struct {
+ UserID int64
+ StopWatches []*Stopwatch
+}
+
+// GetUIDsAndStopwatch returns all running stopwatches grouped by user ID
+func GetUIDsAndStopwatch(ctx context.Context) ([]*UserStopwatch, error) {
+ sws := []*Stopwatch{}
+ if err := db.GetEngine(ctx).Where("issue_id != 0").Find(&sws); err != nil {
+ return nil, err
+ }
+ if len(sws) == 0 {
+ return []*UserStopwatch{}, nil
+ }
+
+ lastUserID := int64(-1)
+ res := []*UserStopwatch{}
+ for _, sw := range sws {
+ if lastUserID == sw.UserID {
+ res[len(res)-1].StopWatches = append(res[len(res)-1].StopWatches, sw)
+ } else {
+ lastUserID = sw.UserID
+ res = append(res, &UserStopwatch{
+ UserID: sw.UserID,
+ StopWatches: []*Stopwatch{sw},
+ })
+ }
+ }
+ return res, nil
+}
+
+// GetUserStopwatches returns the list of all stopwatches of a user
+func GetUserStopwatches(ctx context.Context, userID int64, listOptions db.ListOptions) ([]*Stopwatch, error) {
+ sws := make([]*Stopwatch, 0, 8)
+ sess := db.GetEngine(ctx).Where("stopwatch.user_id = ?", userID)
+ if listOptions.Page != 0 {
+ sess = db.SetSessionPagination(sess, &listOptions)
+ }
+
+ err := sess.Find(&sws)
+ if err != nil {
+ return nil, err
+ }
+ return sws, nil
+}
+
+// CountUserStopwatches returns the count of all stopwatches of a user
+func CountUserStopwatches(ctx context.Context, userID int64) (int64, error) {
+ return db.GetEngine(ctx).Where("user_id = ?", userID).Count(&Stopwatch{})
+}
+
+// StopwatchExists returns true if the stopwatch exists
+func StopwatchExists(ctx context.Context, userID, issueID int64) bool {
+ _, exists, _ := getStopwatch(ctx, userID, issueID)
+ return exists
+}
+
+// HasUserStopwatch returns true if the user has a running stopwatch, together with the stopwatch and its issue
+func HasUserStopwatch(ctx context.Context, userID int64) (exists bool, sw *Stopwatch, issue *Issue, err error) {
+ type stopwatchIssueRepo struct {
+ Stopwatch `xorm:"extends"`
+ Issue `xorm:"extends"`
+ repo.Repository `xorm:"extends"`
+ }
+
+ swIR := new(stopwatchIssueRepo)
+ exists, err = db.GetEngine(ctx).
+ Table("stopwatch").
+ Where("user_id = ?", userID).
+ Join("INNER", "issue", "issue.id = stopwatch.issue_id").
+ Join("INNER", "repository", "repository.id = issue.repo_id").
+ Get(swIR)
+ if exists {
+ sw = &swIR.Stopwatch
+ issue = &swIR.Issue
+ issue.Repo = &swIR.Repository
+ }
+ return exists, sw, issue, err
+}
+
+// FinishIssueStopwatchIfPossible finishes the user's stopwatch on the issue if it exists; otherwise it does nothing
+func FinishIssueStopwatchIfPossible(ctx context.Context, user *user_model.User, issue *Issue) error {
+ _, exists, err := getStopwatch(ctx, user.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+ if !exists {
+ return nil
+ }
+ return FinishIssueStopwatch(ctx, user, issue)
+}
+
+// CreateOrStopIssueStopwatch creates an issue stopwatch if none exists, otherwise finishes the existing one
+func CreateOrStopIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+ _, exists, err := getStopwatch(ctx, user.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+ if exists {
+ return FinishIssueStopwatch(ctx, user, issue)
+ }
+ return CreateIssueStopwatch(ctx, user, issue)
+}
+
+// FinishIssueStopwatch finishes the stopwatch if it exists, otherwise returns an error
+func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+ sw, exists, err := getStopwatch(ctx, user.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+ if !exists {
+ return ErrIssueStopwatchNotExist{
+ UserID: user.ID,
+ IssueID: issue.ID,
+ }
+ }
+
+ // Create tracked time out of the difference between the start time and now
+ timediff := time.Now().Unix() - int64(sw.CreatedUnix)
+
+ // Create TrackedTime
+ tt := &TrackedTime{
+ Created: time.Now(),
+ IssueID: issue.ID,
+ UserID: user.ID,
+ Time: timediff,
+ }
+
+ if err := db.Insert(ctx, tt); err != nil {
+ return err
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Doer: user,
+ Issue: issue,
+ Repo: issue.Repo,
+ Content: util.SecToTime(timediff),
+ Type: CommentTypeStopTracking,
+ TimeID: tt.ID,
+ }); err != nil {
+ return err
+ }
+ _, err = db.DeleteByBean(ctx, sw)
+ return err
+}
+
+// CreateIssueStopwatch creates a stopwatch for the user on the issue, finishing any other stopwatch the user has running first
+func CreateIssueStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ // if another stopwatch is running: stop it
+ exists, _, otherIssue, err := HasUserStopwatch(ctx, user.ID)
+ if err != nil {
+ return err
+ }
+ if exists {
+ if err := FinishIssueStopwatch(ctx, user, otherIssue); err != nil {
+ return err
+ }
+ }
+
+ // Create stopwatch
+ sw := &Stopwatch{
+ UserID: user.ID,
+ IssueID: issue.ID,
+ }
+
+ if err := db.Insert(ctx, sw); err != nil {
+ return err
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Doer: user,
+ Issue: issue,
+ Repo: issue.Repo,
+ Type: CommentTypeStartTracking,
+ }); err != nil {
+ return err
+ }
+
+ return nil
+}
+
+// CancelStopwatch removes the given stopwatch and logs it into issue's timeline.
+func CancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+ if err := cancelStopwatch(ctx, user, issue); err != nil {
+ return err
+ }
+ return committer.Commit()
+}
+
+func cancelStopwatch(ctx context.Context, user *user_model.User, issue *Issue) error {
+ e := db.GetEngine(ctx)
+ sw, exists, err := getStopwatch(ctx, user.ID, issue.ID)
+ if err != nil {
+ return err
+ }
+
+ if exists {
+ if _, err := e.Delete(sw); err != nil {
+ return err
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Doer: user,
+ Issue: issue,
+ Repo: issue.Repo,
+ Type: CommentTypeCancelTracking,
+ }); err != nil {
+ return err
+ }
+ }
+ return nil
+}
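A minimal usage sketch of the stopwatch API above (illustrative, not part of the patch); ctx, user and issue are assumed to be loaded already, and the issues_model alias matches the tests below.

    // Toggle time tracking: start a stopwatch if none is running for this
    // user/issue pair, otherwise stop it and record the elapsed time.
    if err := issues_model.CreateOrStopIssueStopwatch(ctx, user, issue); err != nil {
        return err
    }
    if issues_model.StopwatchExists(ctx, user.ID, issue.ID) {
        // A stopwatch is now running; FinishIssueStopwatchIfPossible records
        // the elapsed time later, while CancelStopwatch discards it.
    }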
diff --git a/models/issues/stopwatch_test.go b/models/issues/stopwatch_test.go
new file mode 100644
index 0000000..68a11ac
--- /dev/null
+++ b/models/issues/stopwatch_test.go
@@ -0,0 +1,79 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/timeutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCancelStopwatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user1, err := user_model.GetUserByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+
+ err = issues_model.CancelStopwatch(db.DefaultContext, user1, issue1)
+ require.NoError(t, err)
+ unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: user1.ID, IssueID: issue1.ID})
+
+ _ = unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeCancelTracking, PosterID: user1.ID, IssueID: issue1.ID})
+
+ require.NoError(t, issues_model.CancelStopwatch(db.DefaultContext, user1, issue2))
+}
+
+func TestStopwatchExists(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ assert.True(t, issues_model.StopwatchExists(db.DefaultContext, 1, 1))
+ assert.False(t, issues_model.StopwatchExists(db.DefaultContext, 1, 2))
+}
+
+func TestHasUserStopwatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ exists, sw, _, err := issues_model.HasUserStopwatch(db.DefaultContext, 1)
+ require.NoError(t, err)
+ assert.True(t, exists)
+ assert.Equal(t, int64(1), sw.ID)
+
+ exists, _, _, err = issues_model.HasUserStopwatch(db.DefaultContext, 3)
+ require.NoError(t, err)
+ assert.False(t, exists)
+}
+
+func TestCreateOrStopIssueStopwatch(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ user2, err := user_model.GetUserByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+ org3, err := user_model.GetUserByID(db.DefaultContext, 3)
+ require.NoError(t, err)
+
+ issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+ issue2, err := issues_model.GetIssueByID(db.DefaultContext, 2)
+ require.NoError(t, err)
+
+ require.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, org3, issue1))
+ sw := unittest.AssertExistsAndLoadBean(t, &issues_model.Stopwatch{UserID: 3, IssueID: 1})
+ assert.LessOrEqual(t, sw.CreatedUnix, timeutil.TimeStampNow())
+
+ require.NoError(t, issues_model.CreateOrStopIssueStopwatch(db.DefaultContext, user2, issue2))
+ unittest.AssertNotExistsBean(t, &issues_model.Stopwatch{UserID: 2, IssueID: 2})
+ unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: 2, IssueID: 2})
+}
diff --git a/models/issues/tracked_time.go b/models/issues/tracked_time.go
new file mode 100644
index 0000000..caa582a
--- /dev/null
+++ b/models/issues/tracked_time.go
@@ -0,0 +1,386 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+
+ "xorm.io/builder"
+ "xorm.io/xorm"
+)
+
+// TrackedTime represents time that was spent on a specific issue.
+type TrackedTime struct {
+ ID int64 `xorm:"pk autoincr"`
+ IssueID int64 `xorm:"INDEX"`
+ Issue *Issue `xorm:"-"`
+ UserID int64 `xorm:"INDEX"`
+ User *user_model.User `xorm:"-"`
+ Created time.Time `xorm:"-"`
+ CreatedUnix int64 `xorm:"created"`
+ Time int64 `xorm:"NOT NULL"`
+ Deleted bool `xorm:"NOT NULL DEFAULT false"`
+}
+
+func init() {
+ db.RegisterModel(new(TrackedTime))
+}
+
+// TrackedTimeList is a list of TrackedTime entries
+type TrackedTimeList []*TrackedTime
+
+// AfterLoad is invoked from XORM after setting the values of all fields of this object.
+func (t *TrackedTime) AfterLoad() {
+ t.Created = time.Unix(t.CreatedUnix, 0).In(setting.DefaultUILocation)
+}
+
+// LoadAttributes loads the issue and the user of the tracked time
+func (t *TrackedTime) LoadAttributes(ctx context.Context) (err error) {
+ // Load the issue
+ if t.Issue == nil {
+ t.Issue, err = GetIssueByID(ctx, t.IssueID)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ }
+ // Now load the repo for the issue (which we may have just loaded)
+ if t.Issue != nil {
+ err = t.Issue.LoadRepo(ctx)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ }
+ // Load the user
+ if t.User == nil {
+ t.User, err = user_model.GetUserByID(ctx, t.UserID)
+ if err != nil {
+ if !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ t.User = user_model.NewGhostUser()
+ }
+ }
+ return nil
+}
+
+// LoadAttributes loads the issue and the user of each tracked time in the list
+func (tl TrackedTimeList) LoadAttributes(ctx context.Context) error {
+ for _, t := range tl {
+ if err := t.LoadAttributes(ctx); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+// FindTrackedTimesOptions represents the filters for tracked times. If an ID is 0 it will be ignored.
+type FindTrackedTimesOptions struct {
+ db.ListOptions
+ IssueID int64
+ UserID int64
+ RepositoryID int64
+ MilestoneID int64
+ CreatedAfterUnix int64
+ CreatedBeforeUnix int64
+}
+
+// toCond will convert each condition into a xorm-Cond
+func (opts *FindTrackedTimesOptions) ToConds() builder.Cond {
+ cond := builder.NewCond().And(builder.Eq{"tracked_time.deleted": false})
+ if opts.IssueID != 0 {
+ cond = cond.And(builder.Eq{"issue_id": opts.IssueID})
+ }
+ if opts.UserID != 0 {
+ cond = cond.And(builder.Eq{"user_id": opts.UserID})
+ }
+ if opts.RepositoryID != 0 {
+ cond = cond.And(builder.Eq{"issue.repo_id": opts.RepositoryID})
+ }
+ if opts.MilestoneID != 0 {
+ cond = cond.And(builder.Eq{"issue.milestone_id": opts.MilestoneID})
+ }
+ if opts.CreatedAfterUnix != 0 {
+ cond = cond.And(builder.Gte{"tracked_time.created_unix": opts.CreatedAfterUnix})
+ }
+ if opts.CreatedBeforeUnix != 0 {
+ cond = cond.And(builder.Lte{"tracked_time.created_unix": opts.CreatedBeforeUnix})
+ }
+ return cond
+}
+
+func (opts *FindTrackedTimesOptions) ToJoins() []db.JoinFunc {
+ if opts.RepositoryID > 0 || opts.MilestoneID > 0 {
+ return []db.JoinFunc{
+ func(e db.Engine) error {
+ e.Join("INNER", "issue", "issue.id = tracked_time.issue_id")
+ return nil
+ },
+ }
+ }
+ return nil
+}
+
+// toSession converts the given options to a xorm session, applying the conditions from ToConds and joining with the issue table if required
+func (opts *FindTrackedTimesOptions) toSession(e db.Engine) db.Engine {
+ sess := e
+ if opts.RepositoryID > 0 || opts.MilestoneID > 0 {
+ sess = e.Join("INNER", "issue", "issue.id = tracked_time.issue_id")
+ }
+
+ sess = sess.Where(opts.ToConds())
+
+ if opts.Page != 0 {
+ sess = db.SetSessionPagination(sess, opts)
+ }
+
+ return sess
+}
+
+// GetTrackedTimes returns all tracked times that match the given options.
+func GetTrackedTimes(ctx context.Context, options *FindTrackedTimesOptions) (trackedTimes TrackedTimeList, err error) {
+ err = options.toSession(db.GetEngine(ctx)).Find(&trackedTimes)
+ return trackedTimes, err
+}
+
+// CountTrackedTimes returns the count of tracked times that match the given options.
+func CountTrackedTimes(ctx context.Context, opts *FindTrackedTimesOptions) (int64, error) {
+ sess := db.GetEngine(ctx).Where(opts.ToConds())
+ if opts.RepositoryID > 0 || opts.MilestoneID > 0 {
+ sess = sess.Join("INNER", "issue", "issue.id = tracked_time.issue_id")
+ }
+ return sess.Count(&TrackedTime{})
+}
+
+// GetTrackedSeconds returns the sum of tracked seconds matching the given options
+func GetTrackedSeconds(ctx context.Context, opts FindTrackedTimesOptions) (trackedSeconds int64, err error) {
+ return opts.toSession(db.GetEngine(ctx)).SumInt(&TrackedTime{}, "time")
+}
+
+// AddTime will add the given time (in seconds) to the issue
+func AddTime(ctx context.Context, user *user_model.User, issue *Issue, amount int64, created time.Time) (*TrackedTime, error) {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer committer.Close()
+
+ t, err := addTime(ctx, user, issue, amount, created)
+ if err != nil {
+ return nil, err
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return nil, err
+ }
+
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Issue: issue,
+ Repo: issue.Repo,
+ Doer: user,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", amount),
+ Type: CommentTypeAddTimeManual,
+ TimeID: t.ID,
+ }); err != nil {
+ return nil, err
+ }
+
+ return t, committer.Commit()
+}
+
+func addTime(ctx context.Context, user *user_model.User, issue *Issue, amount int64, created time.Time) (*TrackedTime, error) {
+ if created.IsZero() {
+ created = time.Now()
+ }
+ tt := &TrackedTime{
+ IssueID: issue.ID,
+ UserID: user.ID,
+ Time: amount,
+ Created: created,
+ }
+ return tt, db.Insert(ctx, tt)
+}
+
+// TotalTimesForEachUser returns the time spent (in seconds) per user for the tracked times matching the given options
+func TotalTimesForEachUser(ctx context.Context, options *FindTrackedTimesOptions) (map[*user_model.User]int64, error) {
+ trackedTimes, err := GetTrackedTimes(ctx, options)
+ if err != nil {
+ return nil, err
+ }
+ // Accumulate total time per user ID
+ totalTimesByUser := make(map[int64]int64)
+ for _, t := range trackedTimes {
+ totalTimesByUser[t.UserID] += t.Time
+ }
+
+ totalTimes := make(map[*user_model.User]int64)
+ // Resolve each user ID to a user; missing users are skipped
+ for userID, total := range totalTimesByUser {
+ user, err := user_model.GetUserByID(ctx, userID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ continue
+ }
+ return nil, err
+ }
+ totalTimes[user] = total
+ }
+ return totalTimes, nil
+}
+
+// DeleteIssueUserTimes deletes all tracked times the user spent on the issue
+func DeleteIssueUserTimes(ctx context.Context, issue *Issue, user *user_model.User) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ opts := FindTrackedTimesOptions{
+ IssueID: issue.ID,
+ UserID: user.ID,
+ }
+
+ removedTime, err := deleteTimes(ctx, opts)
+ if err != nil {
+ return err
+ }
+ if removedTime == 0 {
+ return db.ErrNotExist{Resource: "tracked_time"}
+ }
+
+ if err := issue.LoadRepo(ctx); err != nil {
+ return err
+ }
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Issue: issue,
+ Repo: issue.Repo,
+ Doer: user,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", removedTime),
+ Type: CommentTypeDeleteTimeManual,
+ }); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+// DeleteTime deletes a specific tracked time
+func DeleteTime(ctx context.Context, t *TrackedTime) error {
+ ctx, committer, err := db.TxContext(ctx)
+ if err != nil {
+ return err
+ }
+ defer committer.Close()
+
+ if err := t.LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ if err := deleteTime(ctx, t); err != nil {
+ return err
+ }
+
+ if _, err := CreateComment(ctx, &CreateCommentOptions{
+ Issue: t.Issue,
+ Repo: t.Issue.Repo,
+ Doer: t.User,
+ // Content before v1.21 did store the formatted string instead of seconds,
+ // so use "|" as delimiter to mark the new format
+ Content: fmt.Sprintf("|%d", t.Time),
+ Type: CommentTypeDeleteTimeManual,
+ }); err != nil {
+ return err
+ }
+
+ return committer.Commit()
+}
+
+func deleteTimes(ctx context.Context, opts FindTrackedTimesOptions) (removedTime int64, err error) {
+ removedTime, err = GetTrackedSeconds(ctx, opts)
+ if err != nil || removedTime == 0 {
+ return removedTime, err
+ }
+
+ _, err = opts.toSession(db.GetEngine(ctx)).Table("tracked_time").Cols("deleted").Update(&TrackedTime{Deleted: true})
+ return removedTime, err
+}
+
+func deleteTime(ctx context.Context, t *TrackedTime) error {
+ if t.Deleted {
+ return db.ErrNotExist{Resource: "tracked_time", ID: t.ID}
+ }
+ t.Deleted = true
+ _, err := db.GetEngine(ctx).ID(t.ID).Cols("deleted").Update(t)
+ return err
+}
+
+// GetTrackedTimeByID returns the raw TrackedTime by ID without loading attributes
+func GetTrackedTimeByID(ctx context.Context, id int64) (*TrackedTime, error) {
+ tt := new(TrackedTime)
+ has, err := db.GetEngine(ctx).ID(id).Get(tt)
+ if err != nil {
+ return nil, err
+ } else if !has {
+ return nil, db.ErrNotExist{Resource: "tracked_time", ID: id}
+ }
+ return tt, nil
+}
+
+// GetIssueTotalTrackedTime returns the total tracked time for issues by given conditions.
+func GetIssueTotalTrackedTime(ctx context.Context, opts *IssuesOptions, isClosed optional.Option[bool]) (int64, error) {
+ if len(opts.IssueIDs) <= MaxQueryParameters {
+ return getIssueTotalTrackedTimeChunk(ctx, opts, isClosed, opts.IssueIDs)
+ }
+
+ // If too long a list of IDs is provided,
+ // we get the statistics in smaller chunks and accumulate the results
+ var accum int64
+ for i := 0; i < len(opts.IssueIDs); {
+ chunk := i + MaxQueryParameters
+ if chunk > len(opts.IssueIDs) {
+ chunk = len(opts.IssueIDs)
+ }
+ time, err := getIssueTotalTrackedTimeChunk(ctx, opts, isClosed, opts.IssueIDs[i:chunk])
+ if err != nil {
+ return 0, err
+ }
+ accum += time
+ i = chunk
+ }
+ return accum, nil
+}
+
+func getIssueTotalTrackedTimeChunk(ctx context.Context, opts *IssuesOptions, isClosed optional.Option[bool], issueIDs []int64) (int64, error) {
+ sumSession := func(opts *IssuesOptions, issueIDs []int64) *xorm.Session {
+ sess := db.GetEngine(ctx).
+ Table("tracked_time").
+ Where("tracked_time.deleted = ?", false).
+ Join("INNER", "issue", "tracked_time.issue_id = issue.id")
+
+ return applyIssuesOptions(sess, opts, issueIDs)
+ }
+
+ type trackedTime struct {
+ Time int64
+ }
+
+ session := sumSession(opts, issueIDs)
+ if isClosed.Has() {
+ session = session.And("issue.is_closed = ?", isClosed.Value())
+ }
+ return session.SumInt(new(trackedTime), "tracked_time.time")
+}
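Illustrative sketch of querying the tracked-time API above (not part of the patch); ctx and repoID are assumed, and issues_model matches the import alias used in the tests.

    // Find all non-deleted tracked times of a repository and their total.
    opts := &issues_model.FindTrackedTimesOptions{RepositoryID: repoID}
    times, err := issues_model.GetTrackedTimes(ctx, opts)
    if err != nil {
        return err
    }
    if err := times.LoadAttributes(ctx); err != nil { // resolve Issue and User for display
        return err
    }
    total, err := issues_model.GetTrackedSeconds(ctx, *opts) // sum of seconds for the same filters
    if err != nil {
        return err
    }
    _ = total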
diff --git a/models/issues/tracked_time_test.go b/models/issues/tracked_time_test.go
new file mode 100644
index 0000000..4d4e232
--- /dev/null
+++ b/models/issues/tracked_time_test.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package issues_test
+
+import (
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/optional"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestAddTime(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ org3, err := user_model.GetUserByID(db.DefaultContext, 3)
+ require.NoError(t, err)
+
+ issue1, err := issues_model.GetIssueByID(db.DefaultContext, 1)
+ require.NoError(t, err)
+
+ // 3661 = 1h 1min 1s
+ trackedTime, err := issues_model.AddTime(db.DefaultContext, org3, issue1, 3661, time.Now())
+ require.NoError(t, err)
+ assert.Equal(t, int64(3), trackedTime.UserID)
+ assert.Equal(t, int64(1), trackedTime.IssueID)
+ assert.Equal(t, int64(3661), trackedTime.Time)
+
+ tt := unittest.AssertExistsAndLoadBean(t, &issues_model.TrackedTime{UserID: 3, IssueID: 1})
+ assert.Equal(t, int64(3661), tt.Time)
+
+ comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{Type: issues_model.CommentTypeAddTimeManual, PosterID: 3, IssueID: 1})
+ assert.Equal(t, "|3661", comment.Content)
+}
+
+func TestGetTrackedTimes(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // by Issue
+ times, err := issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: 1})
+ require.NoError(t, err)
+ assert.Len(t, times, 1)
+ assert.Equal(t, int64(400), times[0].Time)
+
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: -1})
+ require.NoError(t, err)
+ assert.Empty(t, times)
+
+ // by User
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 1})
+ require.NoError(t, err)
+ assert.Len(t, times, 3)
+ assert.Equal(t, int64(400), times[0].Time)
+
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{UserID: 3})
+ require.NoError(t, err)
+ assert.Empty(t, times)
+
+ // by Repo
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 2})
+ require.NoError(t, err)
+ assert.Len(t, times, 3)
+ assert.Equal(t, int64(1), times[0].Time)
+ issue, err := issues_model.GetIssueByID(db.DefaultContext, times[0].IssueID)
+ require.NoError(t, err)
+ assert.Equal(t, int64(2), issue.RepoID)
+
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 1})
+ require.NoError(t, err)
+ assert.Len(t, times, 5)
+
+ times, err = issues_model.GetTrackedTimes(db.DefaultContext, &issues_model.FindTrackedTimesOptions{RepositoryID: 10})
+ require.NoError(t, err)
+ assert.Empty(t, times)
+}
+
+func TestTotalTimesForEachUser(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ total, err := issues_model.TotalTimesForEachUser(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: 1})
+ require.NoError(t, err)
+ assert.Len(t, total, 1)
+ for user, time := range total {
+ assert.EqualValues(t, 1, user.ID)
+ assert.EqualValues(t, 400, time)
+ }
+
+ total, err = issues_model.TotalTimesForEachUser(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: 2})
+ require.NoError(t, err)
+ assert.Len(t, total, 2)
+ for user, time := range total {
+ if user.ID == 2 {
+ assert.EqualValues(t, 3662, time)
+ } else if user.ID == 1 {
+ assert.EqualValues(t, 20, time)
+ } else {
+ t.Errorf("unexpected user ID %d with tracked time on issue 2", user.ID)
+ }
+ }
+
+ total, err = issues_model.TotalTimesForEachUser(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: 5})
+ require.NoError(t, err)
+ assert.Len(t, total, 1)
+ for user, time := range total {
+ assert.EqualValues(t, 2, user.ID)
+ assert.EqualValues(t, 1, time)
+ }
+
+ total, err = issues_model.TotalTimesForEachUser(db.DefaultContext, &issues_model.FindTrackedTimesOptions{IssueID: 4})
+ require.NoError(t, err)
+ assert.Len(t, total, 2)
+}
+
+func TestGetIssueTotalTrackedTime(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ ttt, err := issues_model.GetIssueTotalTrackedTime(db.DefaultContext, &issues_model.IssuesOptions{MilestoneIDs: []int64{1}}, optional.Some(false))
+ require.NoError(t, err)
+ assert.EqualValues(t, 3682, ttt)
+
+ ttt, err = issues_model.GetIssueTotalTrackedTime(db.DefaultContext, &issues_model.IssuesOptions{MilestoneIDs: []int64{1}}, optional.Some(true))
+ require.NoError(t, err)
+ assert.EqualValues(t, 0, ttt)
+
+ ttt, err = issues_model.GetIssueTotalTrackedTime(db.DefaultContext, &issues_model.IssuesOptions{MilestoneIDs: []int64{1}}, optional.None[bool]())
+ require.NoError(t, err)
+ assert.EqualValues(t, 3682, ttt)
+}
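A standalone sketch (not from the patch) of the chunking pattern GetIssueTotalTrackedTime uses to stay below MaxQueryParameters; sumInChunks and its parameters are hypothetical names.

    // sumInChunks processes ids in slices of at most maxParams entries and
    // accumulates the per-chunk sums, mirroring the loop in GetIssueTotalTrackedTime.
    func sumInChunks(ids []int64, maxParams int, sumChunk func([]int64) (int64, error)) (int64, error) {
        var total int64
        for i := 0; i < len(ids); {
            end := i + maxParams
            if end > len(ids) {
                end = len(ids)
            }
            n, err := sumChunk(ids[i:end])
            if err != nil {
                return 0, err
            }
            total += n
            i = end
        }
        return total, nil
    }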