summaryrefslogtreecommitdiffstats
path: root/services/pull
diff options
context:
space:
mode:
authorDaniel Baumann <daniel@debian.org>2024-10-18 20:33:49 +0200
committerDaniel Baumann <daniel@debian.org>2024-10-18 20:33:49 +0200
commitdd136858f1ea40ad3c94191d647487fa4f31926c (patch)
tree58fec94a7b2a12510c9664b21793f1ed560c6518 /services/pull
parentInitial commit. (diff)
downloadforgejo-dd136858f1ea40ad3c94191d647487fa4f31926c.tar.xz
forgejo-dd136858f1ea40ad3c94191d647487fa4f31926c.zip
Adding upstream version 9.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat (limited to 'services/pull')
-rw-r--r--services/pull/check.go404
-rw-r--r--services/pull/check_test.go70
-rw-r--r--services/pull/comment.go94
-rw-r--r--services/pull/commit_status.go171
-rw-r--r--services/pull/commit_status_test.go65
-rw-r--r--services/pull/edits.go40
-rw-r--r--services/pull/lfs.go135
-rw-r--r--services/pull/main_test.go17
-rw-r--r--services/pull/merge.go562
-rw-r--r--services/pull/merge_ff_only.go21
-rw-r--r--services/pull/merge_merge.go25
-rw-r--r--services/pull/merge_prepare.go288
-rw-r--r--services/pull/merge_rebase.go121
-rw-r--r--services/pull/merge_squash.go86
-rw-r--r--services/pull/merge_test.go67
-rw-r--r--services/pull/patch.go582
-rw-r--r--services/pull/patch_unmerged.go203
-rw-r--r--services/pull/pull.go1032
-rw-r--r--services/pull/pull_test.go94
-rw-r--r--services/pull/review.go465
-rw-r--r--services/pull/review_test.go49
-rw-r--r--services/pull/temp_repo.go196
-rw-r--r--services/pull/update.go180
-rw-r--r--services/pull/update_rebase.go107
24 files changed, 5074 insertions, 0 deletions
diff --git a/services/pull/check.go b/services/pull/check.go
new file mode 100644
index 0000000..2d91ed0
--- /dev/null
+++ b/services/pull/check.go
@@ -0,0 +1,404 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/timeutil"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// prPatchCheckerQueue represents a queue to handle update pull request tests
+var prPatchCheckerQueue *queue.WorkerPoolQueue[string]
+
+// Sentinel errors returned by the merge-checking functions in this file to
+// signal why a pull request cannot be merged right now.
+var (
+ ErrIsClosed = errors.New("pull is closed")
+ ErrUserNotAllowedToMerge = models.ErrDisallowedToMerge{}
+ ErrHasMerged = errors.New("has already been merged")
+ ErrIsWorkInProgress = errors.New("work in progress PRs cannot be merged")
+ ErrIsChecking = errors.New("cannot merge while conflict checking is in progress")
+ ErrNotMergeableState = errors.New("not in mergeable state")
+ ErrDependenciesLeft = errors.New("is blocked by an open dependency")
+)
+
+// AddToTaskQueue adds itself to pull request test task queue.
+// The PR's status is persisted as "checking" before it is queued so the UI
+// reflects the pending conflict check.
+func AddToTaskQueue(ctx context.Context, pr *issues_model.PullRequest) {
+ pr.Status = issues_model.PullRequestStatusChecking
+ // UpdateColsIfNotMerged skips the write if the PR was merged in the meantime.
+ err := pr.UpdateColsIfNotMerged(ctx, "status")
+ if err != nil {
+ log.Error("AddToTaskQueue(%-v).UpdateCols.(add to queue): %v", pr, err)
+ return
+ }
+ log.Trace("Adding %-v to the test pull requests queue", pr)
+ err = prPatchCheckerQueue.Push(strconv.FormatInt(pr.ID, 10))
+ if err != nil && err != queue.ErrAlreadyInQueue {
+ // Already being queued is fine; any other push failure is only logged.
+ log.Error("Error adding %-v to the test pull requests queue: %v", pr, err)
+ }
+}
+
+// MergeCheckType selects which subset of checks CheckPullMergeable performs.
+type MergeCheckType int
+
+const (
+ MergeCheckTypeGeneral MergeCheckType = iota // general merge checks for "merge", "rebase", "squash", etc
+ MergeCheckTypeManually // Manually Merged button (mark a PR as merged manually)
+ MergeCheckTypeAuto // Auto Merge (Scheduled Merge) After Checks Succeed
+)
+
+// CheckPullMergeable check if the pull mergeable based on all conditions (branch protection, merge options, ...)
+// It returns nil when the PR may be merged, or one of the sentinel errors
+// (ErrHasMerged, ErrIsClosed, ...) / a branch-protection error otherwise.
+func CheckPullMergeable(stdCtx context.Context, doer *user_model.User, perm *access_model.Permission, pr *issues_model.PullRequest, mergeCheckType MergeCheckType, adminSkipProtectionCheck bool) error {
+ // Run every check inside a single transaction for a consistent view.
+ return db.WithTx(stdCtx, func(ctx context.Context) error {
+ if pr.HasMerged {
+ return ErrHasMerged
+ }
+
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("Unable to load issue[%d] for %-v: %v", pr.IssueID, pr, err)
+ return err
+ } else if pr.Issue.IsClosed {
+ return ErrIsClosed
+ }
+
+ if allowedMerge, err := IsUserAllowedToMerge(ctx, pr, *perm, doer); err != nil {
+ log.Error("Error whilst checking if %-v is allowed to merge %-v: %v", doer, pr, err)
+ return err
+ } else if !allowedMerge {
+ return ErrUserNotAllowedToMerge
+ }
+
+ if mergeCheckType == MergeCheckTypeManually {
+ // if doer is doing "manually merge" (mark as merged manually), do not check anything
+ return nil
+ }
+
+ if pr.IsWorkInProgress(ctx) {
+ return ErrIsWorkInProgress
+ }
+
+ // An empty PR is let through here even though it cannot auto-merge.
+ if !pr.CanAutoMerge() && !pr.IsEmpty() {
+ return ErrNotMergeableState
+ }
+
+ if pr.IsChecking() {
+ return ErrIsChecking
+ }
+
+ if pb, err := CheckPullBranchProtections(ctx, pr, false); err != nil {
+ if !models.IsErrDisallowedToMerge(err) {
+ log.Error("Error whilst checking pull branch protection for %-v: %v", pr, err)
+ return err
+ }
+
+ // Now the branch protection check failed, check whether the failure could be skipped (skip by setting err = nil)
+
+ // * when doing Auto Merge (Scheduled Merge After Checks Succeed), skip the branch protection check
+ if mergeCheckType == MergeCheckTypeAuto {
+ err = nil
+ }
+
+ // * if the doer is admin, they could skip the branch protection check,
+ // if that's allowed by the protected branch rule.
+ if adminSkipProtectionCheck {
+ if doer.IsAdmin {
+ err = nil // instance admin can skip the check, so clear the error
+ } else if !pb.ApplyToAdmins {
+ if isRepoAdmin, errCheckAdmin := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); errCheckAdmin != nil {
+ log.Error("Unable to check if %-v is a repo admin in %-v: %v", doer, pr.BaseRepo, errCheckAdmin)
+ return errCheckAdmin
+ } else if isRepoAdmin {
+ err = nil // repo admin can skip the check, so clear the error
+ }
+ }
+ }
+
+ // If there is still a branch protection check error, return it
+ if err != nil {
+ return err
+ }
+ }
+
+ // The signed-merge result itself is discarded; only the error matters here.
+ if _, err := isSignedIfRequired(ctx, pr, doer); err != nil {
+ return err
+ }
+
+ if noDeps, err := issues_model.IssueNoDependenciesLeft(ctx, pr.Issue); err != nil {
+ return err
+ } else if !noDeps {
+ return ErrDependenciesLeft
+ }
+
+ return nil
+ })
+}
+
+// isSignedIfRequired check if merge will be signed if required
+// It returns true when the base branch has no protection rule requiring
+// signed commits, or when a signed merge can be produced for the doer.
+func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) (bool, error) {
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+
+ // No matching rule, or the rule does not require signed commits: nothing to check.
+ if pb == nil || !pb.RequireSignedCommits {
+ return true, nil
+ }
+
+ sign, _, _, err := asymkey_service.SignMerge(ctx, pr, doer, pr.BaseRepo.RepoPath(), pr.BaseBranch, pr.GetGitRefName())
+
+ return sign, err
+}
+
+// checkAndUpdateStatus checks if pull request is possible to leaving checking status,
+// and set to be either conflict or mergeable.
+func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) {
+ // If status has not been changed to conflict by testPatch then we are mergeable
+ if pr.Status == issues_model.PullRequestStatusChecking {
+ pr.Status = issues_model.PullRequestStatusMergeable
+ }
+
+ // Make sure there is no waiting test to process before leaving the checking status.
+ has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ if err != nil {
+ log.Error("Unable to check if the queue is waiting to reprocess %-v. Error: %v", pr, err)
+ }
+
+ if has {
+ // Do not persist anything: the queued re-check will update the status itself.
+ log.Trace("Not updating status for %-v as it is due to be rechecked", pr)
+ return
+ }
+
+ // Persist the outcome of the conflict check; skipped if the PR got merged meanwhile.
+ if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil {
+ log.Error("Update[%-v]: %v", pr, err)
+ }
+}
+
+// getMergeCommit checks if a pull request has been merged
+// Returns the git.Commit of the pull request if merged
+// Returns (nil, nil) when the PR head is not yet an ancestor of the base branch.
+func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Commit, error) {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("unable to load base repo for %s: %w", pr, err)
+ }
+
+ prHeadRef := pr.GetGitRefName()
+
+ // Check if the pull request is merged into BaseBranch
+ if _, _, err := git.NewCommand(ctx, "merge-base", "--is-ancestor").
+ AddDynamicArguments(prHeadRef, pr.BaseBranch).
+ RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}); err != nil {
+ if strings.Contains(err.Error(), "exit status 1") {
+ // prHeadRef is not an ancestor of the base branch
+ return nil, nil
+ }
+ // Errors are signaled by a non-zero status that is not 1
+ return nil, fmt.Errorf("%-v git merge-base --is-ancestor: %w", pr, err)
+ }
+
+ // If merge-base successfully exits then prHeadRef is an ancestor of pr.BaseBranch
+
+ // Find the head commit id
+ prHeadCommitID, err := git.GetFullCommitID(ctx, pr.BaseRepo.RepoPath(), prHeadRef)
+ if err != nil {
+ return nil, fmt.Errorf("GetFullCommitID(%s) in %s: %w", prHeadRef, pr.BaseRepo.FullName(), err)
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return nil, fmt.Errorf("%-v OpenRepository: %w", pr.BaseRepo, err)
+ }
+ defer gitRepo.Close()
+
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+
+ // Get the commit from BaseBranch where the pull request got merged
+ // --reverse makes the oldest such merge commit come first.
+ mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse").
+ AddDynamicArguments(prHeadCommitID + ".." + pr.BaseBranch).
+ RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ if err != nil {
+ return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %w", err)
+ } else if len(mergeCommit) < objectFormat.FullLength() {
+ // PR was maybe fast-forwarded, so just use last commit of PR
+ mergeCommit = prHeadCommitID
+ }
+ // rev-list output ends with a newline; trim before resolving the commit.
+ mergeCommit = strings.TrimSpace(mergeCommit)
+
+ commit, err := gitRepo.GetCommit(mergeCommit)
+ if err != nil {
+ return nil, fmt.Errorf("GetMergeCommit[%s]: %w", mergeCommit, err)
+ }
+
+ return commit, nil
+}
+
+// manuallyMerged checks if a pull request got manually merged
+// When a pull request got manually merged mark the pull request as merged
+// Returns true only when the PR was detected as merged AND successfully
+// marked as such in the database.
+func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("%-v LoadBaseRepo: %v", pr, err)
+ return false
+ }
+
+ // Autodetection of manual merges must be enabled on the base repository.
+ if unit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+ if !config.AutodetectManualMerge {
+ return false
+ }
+ } else {
+ log.Error("%-v BaseRepo.GetUnit(unit.TypePullRequests): %v", pr, err)
+ return false
+ }
+
+ commit, err := getMergeCommit(ctx, pr)
+ if err != nil {
+ log.Error("%-v getMergeCommit: %v", pr, err)
+ return false
+ }
+
+ if commit == nil {
+ // no merge commit found
+ return false
+ }
+
+ pr.MergedCommitID = commit.ID.String()
+ pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix())
+ pr.Status = issues_model.PullRequestStatusManuallyMerged
+ // Lookup failure is deliberately ignored; the nil case is handled below.
+ merger, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+
+ // When the commit author is unknown set the BaseRepo owner as merger
+ if merger == nil {
+ if pr.BaseRepo.Owner == nil {
+ if err = pr.BaseRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
+ return false
+ }
+ }
+ merger = pr.BaseRepo.Owner
+ }
+ pr.Merger = merger
+ pr.MergerID = merger.ID
+
+ // NOTE(review): SetMerged returning false presumably means the PR was
+ // already marked merged concurrently — confirm against its implementation.
+ if merged, err := pr.SetMerged(ctx); err != nil {
+ log.Error("%-v setMerged : %v", pr, err)
+ return false
+ } else if !merged {
+ return false
+ }
+
+ notify_service.MergePullRequest(ctx, merger, pr)
+
+ log.Info("manuallyMerged[%-v]: Marked as manually merged into %s/%s by commit id: %s", pr, pr.BaseRepo.Name, pr.BaseBranch, commit.ID.String())
+ return true
+}
+
+// InitializePullRequests checks and tests untested patches of pull requests.
+// It re-queues every PR left in the "checking" state (e.g. after a restart).
+func InitializePullRequests(ctx context.Context) {
+ prs, err := issues_model.GetPullRequestIDsByCheckStatus(ctx, issues_model.PullRequestStatusChecking)
+ if err != nil {
+ log.Error("Find Checking PRs: %v", err)
+ return
+ }
+ for _, prID := range prs {
+ select {
+ case <-ctx.Done():
+ // Shutdown requested: stop re-queueing.
+ return
+ default:
+ log.Trace("Adding PR[%d] to the pull requests patch checking queue", prID)
+ if err := prPatchCheckerQueue.Push(strconv.FormatInt(prID, 10)); err != nil {
+ log.Error("Error adding PR[%d] to the pull requests patch checking queue %v", prID, err)
+ }
+ }
+ }
+}
+
+// handle passed PR IDs and test the PRs
+func handler(items ...string) []string {
+ for _, s := range items {
+ // IDs were queued as decimal strings; a malformed item parses to 0 and
+ // testPR will simply fail to load that pull request.
+ id, _ := strconv.ParseInt(s, 10, 64)
+ testPR(id)
+ }
+ // nil: no items to re-queue (NOTE(review): per the queue handler contract — confirm).
+ return nil
+}
+
+// testPR runs the conflict check for a single pull request ID pulled from the
+// patch-checker queue, updating the PR status accordingly.
+func testPR(id int64) {
+ // Serialize work per PR ID so two checks never run concurrently for one PR.
+ pullWorkingPool.CheckIn(fmt.Sprint(id))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(id))
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("Test PR[%d] from patch checking queue", id))
+ defer finished()
+
+ pr, err := issues_model.GetPullRequestByID(ctx, id)
+ if err != nil {
+ log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err)
+ return
+ }
+
+ log.Trace("Testing %-v", pr)
+ defer func() {
+ log.Trace("Done testing %-v (status: %s)", pr, pr.Status)
+ }()
+
+ if pr.HasMerged {
+ log.Trace("%-v is already merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
+ return
+ }
+
+ // A manual merge detected here marks the PR merged and ends the check.
+ if manuallyMerged(ctx, pr) {
+ log.Trace("%-v is manually merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
+ return
+ }
+
+ if err := TestPatch(pr); err != nil {
+ // Patch testing failed outright: record the error state on the PR.
+ log.Error("testPatch[%-v]: %v", pr, err)
+ pr.Status = issues_model.PullRequestStatusError
+ if err := pr.UpdateCols(ctx, "status"); err != nil {
+ log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err)
+ }
+ return
+ }
+ checkAndUpdateStatus(ctx, pr)
+}
+
+// CheckPRsForBaseBranch queues all unmerged pull requests targeting the given
+// base branch for conflict re-checking.
+func CheckPRsForBaseBranch(ctx context.Context, baseRepo *repo_model.Repository, baseBranchName string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, baseRepo.ID, baseBranchName)
+ if err != nil {
+ return err
+ }
+
+ for _, pr := range prs {
+ AddToTaskQueue(ctx, pr)
+ }
+
+ return nil
+}
+
+// Init runs the task queue to test all the checking status pull requests
+func Init() error {
+ prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", handler)
+
+ if prPatchCheckerQueue == nil {
+ return fmt.Errorf("unable to create pr_patch_checker queue")
+ }
+
+ // Start the queue workers and re-queue any PRs left in "checking" state.
+ go graceful.GetManager().RunWithCancel(prPatchCheckerQueue)
+ go graceful.GetManager().RunWithShutdownContext(InitializePullRequests)
+ return nil
+}
diff --git a/services/pull/check_test.go b/services/pull/check_test.go
new file mode 100644
index 0000000..b99cf01
--- /dev/null
+++ b/services/pull/check_test.go
@@ -0,0 +1,70 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "strconv"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TestPullRequest_AddToTaskQueue verifies that AddToTaskQueue persists the
+// "checking" status and pushes the PR ID onto the patch-checker queue, and
+// that running the queue delivers the ID to the handler exactly once.
+func TestPullRequest_AddToTaskQueue(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ // Capture handled IDs through a channel instead of running real checks.
+ idChan := make(chan int64, 10)
+ testHandler := func(items ...string) []string {
+ for _, s := range items {
+ id, _ := strconv.ParseInt(s, 10, 64)
+ idChan <- id
+ }
+ return nil
+ }
+
+ cfg, err := setting.GetQueueSettings(setting.CfgProvider, "pr_patch_checker")
+ require.NoError(t, err)
+ prPatchCheckerQueue, err = queue.NewWorkerPoolQueueWithContext(context.Background(), "pr_patch_checker", cfg, testHandler, true)
+ require.NoError(t, err)
+
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ AddToTaskQueue(db.DefaultContext, pr)
+
+ assert.Eventually(t, func() bool {
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ return pr.Status == issues_model.PullRequestStatusChecking
+ }, 1*time.Second, 100*time.Millisecond)
+
+ // The ID must be queued before the workers start.
+ has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ assert.True(t, has)
+ require.NoError(t, err)
+
+ go prPatchCheckerQueue.Run()
+
+ select {
+ case id := <-idChan:
+ assert.EqualValues(t, pr.ID, id)
+ case <-time.After(time.Second):
+ assert.FailNow(t, "Timeout: nothing was added to pullRequestQueue")
+ }
+
+ // After handling, the item must no longer be in the queue.
+ has, err = prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10))
+ assert.False(t, has)
+ require.NoError(t, err)
+
+ pr = unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})
+ assert.Equal(t, issues_model.PullRequestStatusChecking, pr.Status)
+
+ prPatchCheckerQueue.ShutdownWait(5 * time.Second)
+ prPatchCheckerQueue = nil
+}
diff --git a/services/pull/comment.go b/services/pull/comment.go
new file mode 100644
index 0000000..53587d4
--- /dev/null
+++ b/services/pull/comment.go
@@ -0,0 +1,94 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/json"
+)
+
+// getCommitIDsFromRepo get commit IDs from repo in between oldCommitID and newCommitID
+// isForcePush will be true if oldCommit isn't on the branch
+// Commit on baseBranch will skip
+func getCommitIDsFromRepo(ctx context.Context, repo *repo_model.Repository, oldCommitID, newCommitID, baseBranch string) (commitIDs []string, isForcePush bool, err error) {
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, repo)
+ if err != nil {
+ return nil, false, err
+ }
+ defer closer.Close()
+
+ oldCommit, err := gitRepo.GetCommit(oldCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
+ newCommit, err := gitRepo.GetCommit(newCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
+ isForcePush, err = newCommit.IsForcePush(oldCommitID)
+ if err != nil {
+ return nil, false, err
+ }
+
+ // On a force push, only report the old and new tips, not the full range.
+ if isForcePush {
+ commitIDs = make([]string, 2)
+ commitIDs[0] = oldCommitID
+ commitIDs[1] = newCommitID
+
+ return commitIDs, isForcePush, err
+ }
+
+ // Find commits between new and old commit excluding base branch commits
+ commits, err := gitRepo.CommitsBetweenNotBase(newCommit, oldCommit, baseBranch)
+ if err != nil {
+ return nil, false, err
+ }
+
+ // Reverse into oldest-first order.
+ commitIDs = make([]string, 0, len(commits))
+ for i := len(commits) - 1; i >= 0; i-- {
+ commitIDs = append(commitIDs, commits[i].ID.String())
+ }
+
+ return commitIDs, isForcePush, err
+}
+
+// CreatePushPullComment create push code to pull base comment
+// It records the pushed commit IDs (or force-push tips) as a JSON payload in a
+// CommentTypePullRequestPush comment on the PR's issue. Returns (nil, nil)
+// when the PR is already merged or either commit ID is empty.
+func CreatePushPullComment(ctx context.Context, pusher *user_model.User, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (comment *issues_model.Comment, err error) {
+ if pr.HasMerged || oldCommitID == "" || newCommitID == "" {
+ return nil, nil
+ }
+
+ ops := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypePullRequestPush,
+ Doer: pusher,
+ Repo: pr.BaseRepo,
+ }
+
+ var data issues_model.PushActionContent
+
+ data.CommitIDs, data.IsForcePush, err = getCommitIDsFromRepo(ctx, pr.BaseRepo, oldCommitID, newCommitID, pr.BaseBranch)
+ if err != nil {
+ return nil, err
+ }
+
+ ops.Issue = pr.Issue
+
+ // The comment content is the serialized push payload, not display text.
+ dataJSON, err := json.Marshal(data)
+ if err != nil {
+ return nil, err
+ }
+
+ ops.Content = string(dataJSON)
+
+ comment, err = issues_model.CreateComment(ctx, ops)
+
+ return comment, err
+}
diff --git a/services/pull/commit_status.go b/services/pull/commit_status.go
new file mode 100644
index 0000000..0d4763a
--- /dev/null
+++ b/services/pull/commit_status.go
@@ -0,0 +1,171 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/gobwas/glob"
+)
+
+// MergeRequiredContextsCommitStatus returns a commit status state for given required contexts
+// Required contexts are treated as glob patterns matched against each
+// status's Context; the worst matching state wins. With no required contexts
+// (or none matched), it falls back to the aggregate of all statuses.
+func MergeRequiredContextsCommitStatus(commitStatuses []*git_model.CommitStatus, requiredContexts []string) structs.CommitStatusState {
+ // matchedCount is the number of `CommitStatus.Context` that match any context of `requiredContexts`
+ matchedCount := 0
+ returnedStatus := structs.CommitStatusSuccess
+
+ if len(requiredContexts) > 0 {
+ requiredContextsGlob := make(map[string]glob.Glob, len(requiredContexts))
+ for _, ctx := range requiredContexts {
+ // A pattern that fails to compile is logged and skipped.
+ if gp, err := glob.Compile(ctx); err != nil {
+ log.Error("glob.Compile %s failed. Error: %v", ctx, err)
+ } else {
+ requiredContextsGlob[ctx] = gp
+ }
+ }
+
+ for _, gp := range requiredContextsGlob {
+ var targetStatus structs.CommitStatusState
+ // Only the first status matching this pattern is considered.
+ for _, commitStatus := range commitStatuses {
+ if gp.Match(commitStatus.Context) {
+ targetStatus = commitStatus.State
+ matchedCount++
+ break
+ }
+ }
+
+ // If required rule not match any action, then it is pending
+ if targetStatus == "" {
+ if structs.CommitStatusPending.NoBetterThan(returnedStatus) {
+ returnedStatus = structs.CommitStatusPending
+ }
+ break
+ }
+
+ if targetStatus.NoBetterThan(returnedStatus) {
+ returnedStatus = targetStatus
+ }
+ }
+ }
+
+ if matchedCount == 0 && returnedStatus == structs.CommitStatusSuccess {
+ // Nothing matched: fall back to the overall calculated status.
+ status := git_model.CalcCommitStatus(commitStatuses)
+ if status != nil {
+ return status.State
+ }
+ return ""
+ }
+
+ return returnedStatus
+}
+
+// IsCommitStatusContextSuccess returns true if all required status check contexts succeed.
+// Note: contexts are compared by exact string equality here, unlike the glob
+// matching used by MergeRequiredContextsCommitStatus.
+func IsCommitStatusContextSuccess(commitStatuses []*git_model.CommitStatus, requiredContexts []string) bool {
+ // If no specific context is required, require that last commit status is a success
+ if len(requiredContexts) == 0 {
+ status := git_model.CalcCommitStatus(commitStatuses)
+ if status == nil || status.State != structs.CommitStatusSuccess {
+ return false
+ }
+ return true
+ }
+
+ for _, ctx := range requiredContexts {
+ var found bool
+ for _, commitStatus := range commitStatuses {
+ if commitStatus.Context == ctx {
+ if commitStatus.State != structs.CommitStatusSuccess {
+ return false
+ }
+
+ found = true
+ break
+ }
+ }
+ // A required context with no status at all also fails the check.
+ if !found {
+ return false
+ }
+ }
+ return true
+}
+
+// IsPullCommitStatusPass returns if all required status checks PASS
+// It is trivially true when the base branch has no protection rule with
+// status checks enabled.
+func IsPullCommitStatusPass(ctx context.Context, pr *issues_model.PullRequest) (bool, error) {
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, fmt.Errorf("GetFirstMatchProtectedBranchRule: %w", err)
+ }
+ if pb == nil || !pb.EnableStatusCheck {
+ return true, nil
+ }
+
+ state, err := GetPullRequestCommitStatusState(ctx, pr)
+ if err != nil {
+ return false, err
+ }
+ return state.IsSuccess(), nil
+}
+
+// GetPullRequestCommitStatusState returns pull request merged commit status state
+// It resolves the PR's head commit, loads its latest commit statuses from the
+// base repo, and merges them against the protected branch's required contexts.
+func GetPullRequestCommitStatusState(ctx context.Context, pr *issues_model.PullRequest) (structs.CommitStatusState, error) {
+ // Ensure HeadRepo is loaded
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return "", fmt.Errorf("LoadHeadRepo: %w", err)
+ }
+
+ // check if all required status checks are successful
+ headGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
+ if err != nil {
+ return "", fmt.Errorf("RepositoryFromContextOrOpen: %w", err)
+ }
+ defer closer.Close()
+
+ // The head must still exist: a branch for GitHub-flow PRs, a ref for AGit-flow PRs.
+ if pr.Flow == issues_model.PullRequestFlowGithub && !headGitRepo.IsBranchExist(pr.HeadBranch) {
+ return "", errors.New("head branch does not exist, can not merge")
+ }
+ if pr.Flow == issues_model.PullRequestFlowAGit && !git.IsReferenceExist(ctx, headGitRepo.Path, pr.GetGitRefName()) {
+ return "", errors.New("head branch does not exist, can not merge")
+ }
+
+ var sha string
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ sha, err = headGitRepo.GetBranchCommitID(pr.HeadBranch)
+ } else {
+ sha, err = headGitRepo.GetRefCommitID(pr.GetGitRefName())
+ }
+ if err != nil {
+ return "", err
+ }
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return "", fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+
+ // Statuses are recorded against the base repo, keyed by the head commit SHA.
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ return "", fmt.Errorf("GetLatestCommitStatus: %w", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return "", fmt.Errorf("GetFirstMatchProtectedBranchRule: %w", err)
+ }
+ var requiredContexts []string
+ if pb != nil {
+ requiredContexts = pb.StatusCheckContexts
+ }
+
+ return MergeRequiredContextsCommitStatus(commitStatuses, requiredContexts), nil
+}
diff --git a/services/pull/commit_status_test.go b/services/pull/commit_status_test.go
new file mode 100644
index 0000000..592acdd
--- /dev/null
+++ b/services/pull/commit_status_test.go
@@ -0,0 +1,65 @@
+// Copyright 2024 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// TestMergeRequiredContextsCommitStatus exercises the glob-based required
+// context merging with parallel slices: testCases[i] is checked against
+// testCasesRequiredContexts[i] and must yield testCasesExpected[i].
+func TestMergeRequiredContextsCommitStatus(t *testing.T) {
+ testCases := [][]*git_model.CommitStatus{
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 3", State: structs.CommitStatusSuccess},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusPending},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusFailure},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusSuccess},
+ },
+ {
+ {Context: "Build 1", State: structs.CommitStatusSuccess},
+ {Context: "Build 2", State: structs.CommitStatusSuccess},
+ {Context: "Build 2t", State: structs.CommitStatusSuccess},
+ },
+ }
+ testCasesRequiredContexts := [][]string{
+ {"Build*"},
+ {"Build*", "Build 2t*"},
+ {"Build*", "Build 2t*"},
+ {"Build*", "Build 2t*", "Build 3*"},
+ {"Build*", "Build *", "Build 2t*", "Build 1*"},
+ }
+
+ testCasesExpected := []structs.CommitStatusState{
+ structs.CommitStatusSuccess,
+ structs.CommitStatusPending,
+ structs.CommitStatusFailure,
+ structs.CommitStatusPending,
+ structs.CommitStatusSuccess,
+ }
+
+ for i, commitStatuses := range testCases {
+ if MergeRequiredContextsCommitStatus(commitStatuses, testCasesRequiredContexts[i]) != testCasesExpected[i] {
+ assert.Fail(t, "Test case failed", "Test case %d failed", i+1)
+ }
+ }
+}
diff --git a/services/pull/edits.go b/services/pull/edits.go
new file mode 100644
index 0000000..c7550dc
--- /dev/null
+++ b/services/pull/edits.go
@@ -0,0 +1,40 @@
+// Copyright 2022 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "errors"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+)
+
+// ErrUserHasNoPermissionForAction is returned when the doer may not change the
+// maintainer-edit setting of a pull request.
+var ErrUserHasNoPermissionForAction = errors.New("user not allowed to do this action")
+
+// SetAllowEdits allow edits from maintainers to PRs
+// Only the PR poster may toggle this, and only if they can write code in the
+// head repository.
+func SetAllowEdits(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest, allow bool) error {
+ if doer == nil || !pr.Issue.IsPoster(doer.ID) {
+ return ErrUserHasNoPermissionForAction
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return err
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer)
+ if err != nil {
+ return err
+ }
+
+ if !permission.CanWrite(unit_model.TypeCode) {
+ return ErrUserHasNoPermissionForAction
+ }
+
+ pr.AllowMaintainerEdit = allow
+ return issues_model.UpdateAllowEdits(ctx, pr)
+}
diff --git a/services/pull/lfs.go b/services/pull/lfs.go
new file mode 100644
index 0000000..ed03583
--- /dev/null
+++ b/services/pull/lfs.go
@@ -0,0 +1,135 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "io"
+ "strconv"
+ "sync"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/git/pipeline"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// LFSPush pushes lfs objects referred to in new commits in the head repository from the base repository
+func LFSPush(ctx context.Context, tmpBasePath, mergeHeadSHA, mergeBaseSHA string, pr *issues_model.PullRequest) error {
+ // Now we have to implement git lfs push
+ // git rev-list --objects --filter=blob:limit=1k HEAD --not base
+ // pass blob shas in to git cat-file --batch-check (possibly unnecessary)
+ // ensure only blobs and <=1k size then pass in to git cat-file --batch
+ // to read each sha and check each as a pointer
+ // Then if they are lfs -> add them to the baseRepo
+ // Six goroutines connected by pipes form the processing pipeline below.
+ revListReader, revListWriter := io.Pipe()
+ shasToCheckReader, shasToCheckWriter := io.Pipe()
+ catFileCheckReader, catFileCheckWriter := io.Pipe()
+ shasToBatchReader, shasToBatchWriter := io.Pipe()
+ catFileBatchReader, catFileBatchWriter := io.Pipe()
+ // Buffered so the rev-list stage can report an error without blocking.
+ errChan := make(chan error, 1)
+ wg := sync.WaitGroup{}
+ wg.Add(6)
+ // Create the go-routines in reverse order.
+
+ // 6. Take the output of cat-file --batch and check if each file in turn
+ // to see if they're pointers to files in the LFS store associated with
+ // the head repo and add them to the base repo if so
+ go createLFSMetaObjectsFromCatFileBatch(db.DefaultContext, catFileBatchReader, &wg, pr)
+
+ // 5. Take the shas of the blobs and batch read them
+ go pipeline.CatFileBatch(ctx, shasToBatchReader, catFileBatchWriter, &wg, tmpBasePath)
+
+ // 4. From the provided objects restrict to blobs <=1k
+ go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)
+
+ // 3. Run batch-check on the objects retrieved from rev-list
+ go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, tmpBasePath)
+
+ // 2. Check each object retrieved rejecting those without names as they will be commits or trees
+ go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)
+
+ // 1. Run rev-list objects from mergeHead to mergeBase
+ go pipeline.RevListObjects(ctx, revListWriter, &wg, tmpBasePath, mergeHeadSHA, mergeBaseSHA, errChan)
+
+ wg.Wait()
+ // Non-blocking receive: report a pipeline error if one was posted.
+ select {
+ case err, has := <-errChan:
+ if has {
+ return err
+ }
+ default:
+ }
+ return nil
+}
+
+// createLFSMetaObjectsFromCatFileBatch consumes `git cat-file --batch` output
+// (header line "sha type size\n" followed by size content bytes) and, for each
+// blob that is a valid LFS pointer known to the head repo, associates the
+// pointed-to object with the base repo.
+func createLFSMetaObjectsFromCatFileBatch(ctx context.Context, catFileBatchReader *io.PipeReader, wg *sync.WaitGroup, pr *issues_model.PullRequest) {
+ defer wg.Done()
+ defer catFileBatchReader.Close()
+
+ contentStore := lfs.NewContentStore()
+
+ bufferedReader := bufio.NewReader(catFileBatchReader)
+ // 1025 bytes: enough for a <=1024-byte blob plus the trailing newline
+ // (the previous pipeline stage filters to blobs <=1k).
+ buf := make([]byte, 1025)
+ for {
+ // File descriptor line: sha
+ _, err := bufferedReader.ReadString(' ')
+ if err != nil {
+ // io.EOF here is the normal end of the batch stream.
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ // Throw away the blob
+ if _, err := bufferedReader.ReadString(' '); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ sizeStr, err := bufferedReader.ReadString('\n')
+ if err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ size, err := strconv.Atoi(sizeStr[:len(sizeStr)-1])
+ if err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ // Read the content plus its trailing newline, then drop the newline.
+ pointerBuf := buf[:size+1]
+ if _, err := io.ReadFull(bufferedReader, pointerBuf); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ pointerBuf = pointerBuf[:size]
+ // Now we need to check if the pointerBuf is an LFS pointer
+ pointer, _ := lfs.ReadPointerFromBuffer(pointerBuf)
+ if !pointer.IsValid() {
+ continue
+ }
+
+ // Pointers whose object is missing from the content store are skipped.
+ exist, _ := contentStore.Exists(pointer)
+ if !exist {
+ continue
+ }
+
+ // Then we need to check that this pointer is in the db
+ if _, err := git_model.GetLFSMetaObjectByOid(ctx, pr.HeadRepoID, pointer.Oid); err != nil {
+ if err == git_model.ErrLFSObjectNotExist {
+ log.Warn("During merge of: %d in %-v, there is a pointer to LFS Oid: %s which although present in the LFS store is not associated with the head repo %-v", pr.Index, pr.BaseRepo, pointer.Oid, pr.HeadRepo)
+ continue
+ }
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ // OK we have a pointer that is associated with the head repo
+ // and is actually a file in the LFS
+ // Therefore it should be associated with the base repo
+ if _, err := git_model.NewLFSMetaObject(ctx, pr.BaseRepoID, pointer); err != nil {
+ _ = catFileBatchReader.CloseWithError(err)
+ break
+ }
+ }
+}
diff --git a/services/pull/main_test.go b/services/pull/main_test.go
new file mode 100644
index 0000000..efbb63a
--- /dev/null
+++ b/services/pull/main_test.go
@@ -0,0 +1,17 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+
+ _ "code.gitea.io/gitea/models/actions"
+)
+
// TestMain is the package-level test entry point; it sets up the shared
// unit-test environment (fixtures, test database) via unittest.MainTest.
func TestMain(m *testing.M) {
	unittest.MainTest(m)
}
diff --git a/services/pull/merge.go b/services/pull/merge.go
new file mode 100644
index 0000000..a1585e6
--- /dev/null
+++ b/services/pull/merge.go
@@ -0,0 +1,562 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/references"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ issue_service "code.gitea.io/gitea/services/issue"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
// getMergeMessage composes the message used when merging a pull request.
// It first looks for a repository-provided template
// (.forgejo/ or .gitea/default_merge_message/<STYLE>_TEMPLATE.md on the base
// repo's default branch) and expands it with PR variables plus extraVars;
// otherwise it falls back to a built-in format per merge style.
// Returns the commit subject (message) and commit body (may be empty).
func getMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle, extraVars map[string]string) (message, body string, err error) {
	if err := pr.LoadBaseRepo(ctx); err != nil {
		return "", "", err
	}
	if err := pr.LoadHeadRepo(ctx); err != nil {
		return "", "", err
	}
	if err := pr.LoadIssue(ctx); err != nil {
		return "", "", err
	}
	if err := pr.Issue.LoadPoster(ctx); err != nil {
		return "", "", err
	}
	if err := pr.Issue.LoadRepo(ctx); err != nil {
		return "", "", err
	}

	// With an external issue tracker, PRs are referenced with "!" instead of "#".
	isExternalTracker := pr.BaseRepo.UnitEnabled(ctx, unit.TypeExternalTracker)
	issueReference := "#"
	if isExternalTracker {
		issueReference = "!"
	}

	issueURL, err := url.JoinPath(setting.AppURL, pr.Issue.Link())
	if err != nil {
		return "", "", err
	}
	reviewedOn := fmt.Sprintf("Reviewed-on: %s", issueURL)
	reviewedBy := pr.GetApprovers(ctx)

	if mergeStyle != "" {
		// Look up a per-style template on the base repo's default branch;
		// the .forgejo/ location takes precedence over .gitea/.
		commit, err := baseGitRepo.GetBranchCommit(pr.BaseRepo.DefaultBranch)
		if err != nil {
			return "", "", err
		}

		templateFilepathForgejo := fmt.Sprintf(".forgejo/default_merge_message/%s_TEMPLATE.md", strings.ToUpper(string(mergeStyle)))
		templateFilepathGitea := fmt.Sprintf(".gitea/default_merge_message/%s_TEMPLATE.md", strings.ToUpper(string(mergeStyle)))

		templateContent, err := commit.GetFileContent(templateFilepathForgejo, setting.Repository.PullRequest.DefaultMergeMessageSize)
		if _, ok := err.(git.ErrNotExist); ok {
			templateContent, err = commit.GetFileContent(templateFilepathGitea, setting.Repository.PullRequest.DefaultMergeMessageSize)
		}
		if err != nil {
			// A missing template is not an error; fall through to the defaults below.
			if !git.IsErrNotExist(err) {
				return "", "", err
			}
		} else {
			vars := map[string]string{
				"BaseRepoOwnerName":      pr.BaseRepo.OwnerName,
				"BaseRepoName":           pr.BaseRepo.Name,
				"BaseBranch":             pr.BaseBranch,
				"HeadRepoOwnerName":      "",
				"HeadRepoName":           "",
				"HeadBranch":             pr.HeadBranch,
				"PullRequestTitle":       pr.Issue.Title,
				"PullRequestDescription": pr.Issue.Content,
				"PullRequestPosterName":  pr.Issue.Poster.Name,
				"PullRequestIndex":       strconv.FormatInt(pr.Index, 10),
				"PullRequestReference":   fmt.Sprintf("%s%d", issueReference, pr.Index),
				"ReviewedOn":             reviewedOn,
				"ReviewedBy":             reviewedBy,
			}
			// Head repo may have been deleted; its vars stay empty then.
			if pr.HeadRepo != nil {
				vars["HeadRepoOwnerName"] = pr.HeadRepo.OwnerName
				vars["HeadRepoName"] = pr.HeadRepo.Name
			}
			for extraKey, extraValue := range extraVars {
				vars[extraKey] = extraValue
			}
			// Best effort: build a "closes ..." list from cross references; a
			// resolution error simply leaves ClosingIssues unset.
			refs, err := pr.ResolveCrossReferences(ctx)
			if err == nil {
				closeIssueIndexes := make([]string, 0, len(refs))
				closeWord := "close"
				if len(setting.Repository.PullRequest.CloseKeywords) > 0 {
					closeWord = setting.Repository.PullRequest.CloseKeywords[0]
				}
				for _, ref := range refs {
					if ref.RefAction == references.XRefActionCloses {
						if err := ref.LoadIssue(ctx); err != nil {
							return "", "", err
						}
						closeIssueIndexes = append(closeIssueIndexes, fmt.Sprintf("%s %s%d", closeWord, issueReference, ref.Issue.Index))
					}
				}
				if len(closeIssueIndexes) > 0 {
					vars["ClosingIssues"] = strings.Join(closeIssueIndexes, ", ")
				} else {
					vars["ClosingIssues"] = ""
				}
			}
			message, body = expandDefaultMergeMessage(templateContent, vars)
			return message, body, nil
		}
	}

	if mergeStyle == repo_model.MergeStyleRebase {
		// for fast-forward rebase, do not amend the last commit if there is no template
		return "", "", nil
	}

	body = fmt.Sprintf("%s\n%s", reviewedOn, reviewedBy)

	// Squash merge has a different default commit message format from other merge styles.
	if mergeStyle == repo_model.MergeStyleSquash {
		return fmt.Sprintf("%s (%s%d)", pr.Issue.Title, issueReference, pr.Issue.Index), body, nil
	}

	if pr.BaseRepoID == pr.HeadRepoID {
		return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), body, nil
	}

	if pr.HeadRepo == nil {
		return fmt.Sprintf("Merge pull request '%s' (%s%d) from <deleted>:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadBranch, pr.BaseBranch), body, nil
	}

	return fmt.Sprintf("Merge pull request '%s' (%s%d) from %s:%s into %s", pr.Issue.Title, issueReference, pr.Issue.Index, pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseBranch), body, nil
}
+
// expandDefaultMergeMessage splits a merge-message template into subject and
// body (first line vs. rest) and substitutes ${Var} placeholders from vars.
// Unknown placeholders expand to the empty string.
func expandDefaultMergeMessage(template string, vars map[string]string) (message, body string) {
	trimmed := strings.TrimSpace(template)
	message = trimmed
	if first, rest, found := strings.Cut(trimmed, "\n"); found {
		message = first
		body = strings.TrimSpace(rest)
	}
	lookup := func(name string) string { return vars[name] }
	return os.Expand(message, lookup), os.Expand(body, lookup)
}
+
// GetDefaultMergeMessage returns the default subject and body used when
// merging a pull request with the given style, with no extra template
// variables beyond the standard PR-derived ones.
func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr *issues_model.PullRequest, mergeStyle repo_model.MergeStyle) (message, body string, err error) {
	return getMergeMessage(ctx, baseGitRepo, pr, mergeStyle, nil)
}
+
// Merge merges pull request to base repository.
// Caller should check PR is ready to be merged (review and status checks).
// The merge itself happens in a temporary clone and is pushed back; the
// post-receive hook performs the database updates, so the PR is reloaded
// afterwards before notifications and cross-reference closing.
func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error {
	if err := pr.LoadBaseRepo(ctx); err != nil {
		log.Error("Unable to load base repo: %v", err)
		return fmt.Errorf("unable to load base repo: %w", err)
	} else if err := pr.LoadHeadRepo(ctx); err != nil {
		log.Error("Unable to load head repo: %v", err)
		return fmt.Errorf("unable to load head repo: %w", err)
	}

	// Serialize merges of the same PR.
	pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
	defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))

	prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
	if err != nil {
		log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err)
		return err
	}
	prConfig := prUnit.PullRequestsConfig()

	// Check if merge style is correct and allowed
	if !prConfig.IsMergeStyleAllowed(mergeStyle) {
		return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
	}

	// Re-test other PRs targeting the same base branch once we are done,
	// whether the merge succeeded or not.
	defer func() {
		AddTestPullRequestTask(ctx, doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "", 0)
	}()

	_, err = doMergeAndPush(ctx, pr, doer, mergeStyle, expectedHeadCommitID, message, repo_module.PushTriggerPRMergeToBase)
	if err != nil {
		return err
	}

	// reload pull request because it has been updated by post receive hook
	pr, err = issues_model.GetPullRequestByID(ctx, pr.ID)
	if err != nil {
		return err
	}

	// The loads below are best effort: failures are logged but do not abort
	// the (already completed) merge.
	if err := pr.LoadIssue(ctx); err != nil {
		log.Error("LoadIssue %-v: %v", pr, err)
	}

	if err := pr.Issue.LoadRepo(ctx); err != nil {
		log.Error("pr.Issue.LoadRepo %-v: %v", pr, err)
	}
	if err := pr.Issue.Repo.LoadOwner(ctx); err != nil {
		log.Error("LoadOwner for %-v: %v", pr, err)
	}

	if wasAutoMerged {
		notify_service.AutoMergePullRequest(ctx, doer, pr)
	} else {
		notify_service.MergePullRequest(ctx, doer, pr)
	}

	// Reset cached commit count
	cache.Remove(pr.Issue.Repo.GetCommitsCountCacheKey(pr.BaseBranch, true))

	return handleCloseCrossReferences(ctx, pr, doer)
}
+
// handleCloseCrossReferences closes (or reopens) issues cross-referenced by
// the merged pull request according to their recorded XRef action. A failure
// to resolve the references is only logged; per-issue dependency errors
// (ErrDependenciesLeft) are tolerated so one blocked issue does not abort
// the rest.
func handleCloseCrossReferences(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
	// Resolve cross references
	refs, err := pr.ResolveCrossReferences(ctx)
	if err != nil {
		log.Error("ResolveCrossReferences: %v", err)
		return nil
	}

	for _, ref := range refs {
		if err = ref.LoadIssue(ctx); err != nil {
			return err
		}
		if err = ref.Issue.LoadRepo(ctx); err != nil {
			return err
		}
		// Only change state when it differs from the desired one.
		isClosed := ref.RefAction == references.XRefActionCloses
		if isClosed != ref.Issue.IsClosed {
			if err = issue_service.ChangeStatus(ctx, ref.Issue, doer, pr.MergedCommitID, isClosed); err != nil {
				// Allow ErrDependenciesLeft
				if !issues_model.IsErrDependenciesLeft(err) {
					return err
				}
			}
		}
	}
	return nil
}
+
// doMergeAndPush performs the merge operation without changing any pull information in database and pushes it up to the base repository.
// It merges in a temporary clone, pushes LFS objects first (if enabled) so a
// failed push can be safely re-merged, then pushes the result to the base
// branch; database updates happen in the post-receive hook triggered by that
// push. Returns the resulting merge commit ID.
func doMergeAndPush(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, pushTrigger repo_module.PushTrigger) (string, error) { //nolint:unparam
	// Clone base repo.
	mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, expectedHeadCommitID)
	if err != nil {
		return "", err
	}
	defer cancel()

	// Merge commits.
	switch mergeStyle {
	case repo_model.MergeStyleMerge:
		if err := doMergeStyleMerge(mergeCtx, message); err != nil {
			return "", err
		}
	case repo_model.MergeStyleRebase, repo_model.MergeStyleRebaseMerge:
		if err := doMergeStyleRebase(mergeCtx, mergeStyle, message); err != nil {
			return "", err
		}
	case repo_model.MergeStyleSquash:
		if err := doMergeStyleSquash(mergeCtx, message); err != nil {
			return "", err
		}
	case repo_model.MergeStyleFastForwardOnly:
		if err := doMergeStyleFastForwardOnly(mergeCtx); err != nil {
			return "", err
		}
	default:
		return "", models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: mergeStyle}
	}

	// OK we should cache our current head and origin/headbranch
	mergeHeadSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "HEAD")
	if err != nil {
		return "", fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
	}
	// "original_<base>" is the snapshot of the base branch taken when the
	// temporary repo was created, i.e. the pre-merge base commit.
	mergeBaseSHA, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, "original_"+baseBranch)
	if err != nil {
		return "", fmt.Errorf("Failed to get full commit id for origin/%s: %w", pr.BaseBranch, err)
	}
	mergeCommitID, err := git.GetFullCommitID(ctx, mergeCtx.tmpBasePath, baseBranch)
	if err != nil {
		return "", fmt.Errorf("Failed to get full commit id for the new merge: %w", err)
	}

	// Now it's questionable about where this should go - either after or before the push
	// I think in the interests of data safety - failures to push to the lfs should prevent
	// the merge as you can always remerge.
	if setting.LFS.StartServer {
		if err := LFSPush(ctx, mergeCtx.tmpBasePath, mergeHeadSHA, mergeBaseSHA, pr); err != nil {
			return "", err
		}
	}

	// The push environment attributes the change to the head repo owner,
	// falling back to the doer when that user no longer exists.
	var headUser *user_model.User
	err = pr.HeadRepo.LoadOwner(ctx)
	if err != nil {
		if !user_model.IsErrUserNotExist(err) {
			log.Error("Can't find user: %d for head repository in %-v: %v", pr.HeadRepo.OwnerID, pr, err)
			return "", err
		}
		log.Warn("Can't find user: %d for head repository in %-v - defaulting to doer: %s - %v", pr.HeadRepo.OwnerID, pr, doer.Name, err)
		headUser = doer
	} else {
		headUser = pr.HeadRepo.Owner
	}

	mergeCtx.env = repo_module.FullPushingEnvironment(
		headUser,
		doer,
		pr.BaseRepo,
		pr.BaseRepo.Name,
		pr.ID,
	)

	mergeCtx.env = append(mergeCtx.env, repo_module.EnvPushTrigger+"="+string(pushTrigger))
	pushCmd := git.NewCommand(ctx, "push", "origin").AddDynamicArguments(baseBranch + ":" + git.BranchPrefix + pr.BaseBranch)

	// Push back to upstream.
	// This cause an api call to "/api/internal/hook/post-receive/...",
	// If it's merge, all db transaction and operations should be there but not here to prevent deadlock.
	if err := pushCmd.Run(mergeCtx.RunOpts()); err != nil {
		// Classify the push failure from stderr so callers can react
		// (out-of-date base vs. hook rejection vs. anything else).
		if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
			return "", &git.ErrPushOutOfDate{
				StdOut: mergeCtx.outbuf.String(),
				StdErr: mergeCtx.errbuf.String(),
				Err:    err,
			}
		} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
			err := &git.ErrPushRejected{
				StdOut: mergeCtx.outbuf.String(),
				StdErr: mergeCtx.errbuf.String(),
				Err:    err,
			}
			err.GenerateMessage()
			return "", err
		}
		return "", fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
	}
	mergeCtx.outbuf.Reset()
	mergeCtx.errbuf.Reset()

	return mergeCommitID, nil
}
+
+func commitAndSignNoAuthor(ctx *mergeContext, message string) error {
+ cmdCommit := git.NewCommand(ctx, "commit").AddOptionFormat("--message=%s", message)
+ if ctx.signKeyID == "" {
+ cmdCommit.AddArguments("--no-gpg-sign")
+ } else {
+ cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
+ }
+ if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git commit %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ return nil
+}
+
// runMergeCommand runs the given git merge command in the temporary repo and
// translates failures into typed errors: ErrMergeConflicts (a MERGE_HEAD file
// is left behind), ErrMergeUnrelatedHistories, or — for fast-forward-only
// merges — ErrMergeDivergingFastForwardOnly. Any other failure is returned
// as a generic wrapped error.
func runMergeCommand(ctx *mergeContext, mergeStyle repo_model.MergeStyle, cmd *git.Command) error {
	if err := cmd.Run(ctx.RunOpts()); err != nil {
		// Merge will leave a MERGE_HEAD file in the .git folder if there is a conflict
		if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "MERGE_HEAD")); statErr == nil {
			// We have a merge conflict error
			log.Debug("MergeConflict %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
			return models.ErrMergeConflicts{
				Style:  mergeStyle,
				StdOut: ctx.outbuf.String(),
				StdErr: ctx.errbuf.String(),
				Err:    err,
			}
		} else if strings.Contains(ctx.errbuf.String(), "refusing to merge unrelated histories") {
			log.Debug("MergeUnrelatedHistories %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
			return models.ErrMergeUnrelatedHistories{
				Style:  mergeStyle,
				StdOut: ctx.outbuf.String(),
				StdErr: ctx.errbuf.String(),
				Err:    err,
			}
		} else if mergeStyle == repo_model.MergeStyleFastForwardOnly && strings.Contains(ctx.errbuf.String(), "Not possible to fast-forward, aborting") {
			log.Debug("MergeDivergingFastForwardOnly %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
			return models.ErrMergeDivergingFastForwardOnly{
				StdOut: ctx.outbuf.String(),
				StdErr: ctx.errbuf.String(),
				Err:    err,
			}
		}
		log.Error("git merge %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
		return fmt.Errorf("git merge %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
	}
	ctx.outbuf.Reset()
	ctx.errbuf.Reset()

	return nil
}
+
+var escapedSymbols = regexp.MustCompile(`([*[?! \\])`)
+
+// IsUserAllowedToMerge check if user is allowed to merge PR with given permissions and branch protections
+func IsUserAllowedToMerge(ctx context.Context, pr *issues_model.PullRequest, p access_model.Permission, user *user_model.User) (bool, error) {
+ if user == nil {
+ return false, nil
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return false, err
+ }
+
+ if (p.CanWrite(unit.TypeCode) && pb == nil) || (pb != nil && git_model.IsUserMergeWhitelisted(ctx, pb, user.ID, p)) {
+ return true, nil
+ }
+
+ return false, nil
+}
+
+// CheckPullBranchProtections checks whether the PR is ready to be merged (reviews and status checks).
+// Returns the protected branch rule when `ErrDisallowedToMerge` is returned as error.
+func CheckPullBranchProtections(ctx context.Context, pr *issues_model.PullRequest, skipProtectedFilesCheck bool) (protectedBranchRule *git_model.ProtectedBranch, err error) {
+ if err = pr.LoadBaseRepo(ctx); err != nil {
+ return nil, fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return nil, fmt.Errorf("LoadProtectedBranch: %v", err)
+ }
+ if pb == nil {
+ return nil, nil
+ }
+
+ isPass, err := IsPullCommitStatusPass(ctx, pr)
+ if err != nil {
+ return nil, err
+ }
+ if !isPass {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Not all required status checks successful",
+ }
+ }
+
+ if !issues_model.HasEnoughApprovals(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Does not have enough approvals",
+ }
+ }
+ if issues_model.MergeBlockedByRejectedReview(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "There are requested changes",
+ }
+ }
+ if issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "There are official review requests",
+ }
+ }
+
+ if issues_model.MergeBlockedByOutdatedBranch(pb, pr) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "The head branch is behind the base branch",
+ }
+ }
+
+ if skipProtectedFilesCheck {
+ return nil, nil
+ }
+
+ if pb.MergeBlockedByProtectedFiles(pr.ChangedProtectedFiles) {
+ return pb, models.ErrDisallowedToMerge{
+ Reason: "Changed protected files",
+ }
+ }
+
+ return nil, nil
+}
+
// MergedManually marks pr as merged manually, using commitID (which must be a
// full-length commit ID already reachable from the base branch) as the merge
// commit. The validation and state change run in a single transaction;
// notifications and cross-reference closing happen afterwards.
func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, commitID string) error {
	// Serialize with other merge operations on the same PR.
	pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
	defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))

	if err := db.WithTx(ctx, func(ctx context.Context) error {
		if err := pr.LoadBaseRepo(ctx); err != nil {
			return err
		}
		prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
		if err != nil {
			return err
		}
		prConfig := prUnit.PullRequestsConfig()

		// Check if merge style is correct and allowed
		if !prConfig.IsMergeStyleAllowed(repo_model.MergeStyleManuallyMerged) {
			return models.ErrInvalidMergeStyle{ID: pr.BaseRepo.ID, Style: repo_model.MergeStyleManuallyMerged}
		}

		// Only full-length commit IDs are accepted (no abbreviations).
		objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
		if len(commitID) != objectFormat.FullLength() {
			return fmt.Errorf("Wrong commit ID")
		}

		commit, err := baseGitRepo.GetCommit(commitID)
		if err != nil {
			if git.IsErrNotExist(err) {
				return fmt.Errorf("Wrong commit ID")
			}
			return err
		}
		// Normalize to the canonical ID of the resolved commit.
		commitID = commit.ID.String()

		// The commit must already be part of the base branch.
		ok, err := baseGitRepo.IsCommitInBranch(commitID, pr.BaseBranch)
		if err != nil {
			return err
		}
		if !ok {
			return fmt.Errorf("Wrong commit ID")
		}

		pr.MergedCommitID = commitID
		pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix())
		pr.Status = issues_model.PullRequestStatusManuallyMerged
		pr.Merger = doer
		pr.MergerID = doer.ID

		// SetMerged returns false when the PR was already merged/closed.
		var merged bool
		if merged, err = pr.SetMerged(ctx); err != nil {
			return err
		} else if !merged {
			return fmt.Errorf("SetMerged failed")
		}
		return nil
	}); err != nil {
		return err
	}

	notify_service.MergePullRequest(baseGitRepo.Ctx, doer, pr)
	log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, commitID)

	return handleCloseCrossReferences(ctx, pr, doer)
}
diff --git a/services/pull/merge_ff_only.go b/services/pull/merge_ff_only.go
new file mode 100644
index 0000000..f57c732
--- /dev/null
+++ b/services/pull/merge_ff_only.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// doMergeStyleFastForwardOnly merges the tracking into the current HEAD - which is assumed to be staging branch (equal to the pr.BaseBranch)
+func doMergeStyleFastForwardOnly(ctx *mergeContext) error {
+ cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleFastForwardOnly, cmd); err != nil {
+ log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ return nil
+}
diff --git a/services/pull/merge_merge.go b/services/pull/merge_merge.go
new file mode 100644
index 0000000..bf56c07
--- /dev/null
+++ b/services/pull/merge_merge.go
@@ -0,0 +1,25 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// doMergeStyleMerge merges the tracking branch into the current HEAD - which is assumed to be the staging branch (equal to the pr.BaseBranch)
+func doMergeStyleMerge(ctx *mergeContext, message string) error {
+ cmd := git.NewCommand(ctx, "merge", "--no-ff", "--no-commit").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleMerge, cmd); err != nil {
+ log.Error("%-v Unable to merge tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ if err := commitAndSignNoAuthor(ctx, message); err != nil {
+ log.Error("%-v Unable to make final commit: %v", ctx.pr, err)
+ return err
+ }
+ return nil
+}
diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go
new file mode 100644
index 0000000..88f6c03
--- /dev/null
+++ b/services/pull/merge_prepare.go
@@ -0,0 +1,288 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+)
+
// mergeContext bundles everything needed to perform a merge inside a
// temporary repository: the PR working context, the acting user, the
// author/committer signatures, the signing key and the environment for
// the git commands (and their hooks) that run during the merge.
type mergeContext struct {
	*prContext
	doer      *user_model.User
	sig       *git.Signature // author signature (derived from doer)
	committer *git.Signature // committer signature; may be the signer instead of sig
	signKeyID string         // empty for no-sign, non-empty to sign
	env       []string       // environment passed to git commands run in the temp repo
}
+
// RunOpts returns git.RunOpts wired to the temporary base path with the
// context's environment, resetting the shared stdout/stderr buffers first
// so each command's output starts clean.
func (ctx *mergeContext) RunOpts() *git.RunOpts {
	ctx.outbuf.Reset()
	ctx.errbuf.Reset()
	return &git.RunOpts{
		Env:    ctx.env,
		Dir:    ctx.tmpBasePath,
		Stdout: ctx.outbuf,
		Stderr: ctx.errbuf,
	}
}
+
// createTemporaryRepoForMerge clones the base repo into a temporary location
// prepared for merging pr. When expectedHeadCommitID is non-empty, it
// verifies the tracking branch still points at that commit (returning
// ErrSHADoesNotMatch otherwise). It also decides whether the merge commit
// will be signed and builds the author/committer environment. The returned
// cancel function cleans up the temporary repo and must always be called.
func createTemporaryRepoForMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, expectedHeadCommitID string) (mergeCtx *mergeContext, cancel context.CancelFunc, err error) {
	// Clone base repo.
	prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
	if err != nil {
		log.Error("createTemporaryRepoForPR: %v", err)
		return nil, cancel, err
	}

	mergeCtx = &mergeContext{
		prContext: prCtx,
		doer:      doer,
	}

	if expectedHeadCommitID != "" {
		// Guard against the head branch having moved since the caller
		// inspected it.
		trackingCommitID, _, err := git.NewCommand(ctx, "show-ref", "--hash").AddDynamicArguments(git.BranchPrefix + trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
		if err != nil {
			defer cancel()
			log.Error("failed to get sha of head branch in %-v: show-ref[%s] --hash refs/heads/tracking: %v", mergeCtx.pr, mergeCtx.tmpBasePath, err)
			return nil, nil, fmt.Errorf("unable to get sha of head branch in %v %w", pr, err)
		}
		if strings.TrimSpace(trackingCommitID) != expectedHeadCommitID {
			defer cancel()
			return nil, nil, models.ErrSHADoesNotMatch{
				GivenSHA:   expectedHeadCommitID,
				CurrentSHA: trackingCommitID,
			}
		}
	}

	mergeCtx.outbuf.Reset()
	mergeCtx.errbuf.Reset()
	if err := prepareTemporaryRepoForMerge(mergeCtx); err != nil {
		defer cancel()
		return nil, nil, err
	}

	mergeCtx.sig = doer.NewGitSig()
	mergeCtx.committer = mergeCtx.sig

	// Determine if we should sign
	sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, mergeCtx.pr, mergeCtx.doer, mergeCtx.tmpBasePath, "HEAD", trackingBranch)
	if sign {
		mergeCtx.signKeyID = keyID
		// Under (collaborator-)committer trust models the committer must be
		// the signer for the signature to be shown as trusted.
		if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel {
			mergeCtx.committer = signer
		}
	}

	commitTimeStr := time.Now().Format(time.RFC3339)

	// Because this may call hooks we should pass in the environment
	mergeCtx.env = append(os.Environ(),
		"GIT_AUTHOR_NAME="+mergeCtx.sig.Name,
		"GIT_AUTHOR_EMAIL="+mergeCtx.sig.Email,
		"GIT_AUTHOR_DATE="+commitTimeStr,
		"GIT_COMMITTER_NAME="+mergeCtx.committer.Name,
		"GIT_COMMITTER_EMAIL="+mergeCtx.committer.Email,
		"GIT_COMMITTER_DATE="+commitTimeStr,
	)

	return mergeCtx, cancel, nil
}
+
// prepareTemporaryRepoForMerge takes a repository that has been created using
// createTemporaryRepo and configures it for the merge: it writes a
// sparse-checkout file limited to the files changed by the PR, disables LFS
// filters (the temp repo has no LFS store), enables sparse checkout and reads
// the base branch into the index.
func prepareTemporaryRepoForMerge(ctx *mergeContext) error {
	infoPath := filepath.Join(ctx.tmpBasePath, ".git", "info")
	if err := os.MkdirAll(infoPath, 0o700); err != nil {
		log.Error("%-v Unable to create .git/info in %s: %v", ctx.pr, ctx.tmpBasePath, err)
		return fmt.Errorf("Unable to create .git/info in tmpBasePath: %w", err)
	}

	// Enable sparse-checkout
	// Here we use the .git/info/sparse-checkout file as described in the git documentation
	sparseCheckoutListFile, err := os.OpenFile(filepath.Join(infoPath, "sparse-checkout"), os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
	if err != nil {
		log.Error("%-v Unable to write .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
		return fmt.Errorf("Unable to write .git/info/sparse-checkout file in tmpBasePath: %w", err)
	}
	defer sparseCheckoutListFile.Close() // we will close it earlier but we need to ensure it is closed if there is an error

	// Only the files changed between base and tracking need to be checked out.
	if err := getDiffTree(ctx, ctx.tmpBasePath, baseBranch, trackingBranch, sparseCheckoutListFile); err != nil {
		log.Error("%-v getDiffTree(%s, %s, %s): %v", ctx.pr, ctx.tmpBasePath, baseBranch, trackingBranch, err)
		return fmt.Errorf("getDiffTree: %w", err)
	}

	if err := sparseCheckoutListFile.Close(); err != nil {
		log.Error("%-v Unable to close .git/info/sparse-checkout file in %s: %v", ctx.pr, ctx.tmpBasePath, err)
		return fmt.Errorf("Unable to close .git/info/sparse-checkout file in tmpBasePath: %w", err)
	}

	// setConfig sets a repo-local git config key, surfacing command output on failure.
	setConfig := func(key, value string) error {
		if err := git.NewCommand(ctx, "config", "--local").AddDynamicArguments(key, value).
			Run(ctx.RunOpts()); err != nil {
			log.Error("git config [%s -> %q]: %v\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
			return fmt.Errorf("git config [%s -> %q]: %w\n%s\n%s", key, value, err, ctx.outbuf.String(), ctx.errbuf.String())
		}
		ctx.outbuf.Reset()
		ctx.errbuf.Reset()

		return nil
	}

	// Switch off LFS process (set required, clean and smudge here also)
	if err := setConfig("filter.lfs.process", ""); err != nil {
		return err
	}

	if err := setConfig("filter.lfs.required", "false"); err != nil {
		return err
	}

	if err := setConfig("filter.lfs.clean", ""); err != nil {
		return err
	}

	if err := setConfig("filter.lfs.smudge", ""); err != nil {
		return err
	}

	if err := setConfig("core.sparseCheckout", "true"); err != nil {
		return err
	}

	// Read base branch index
	if err := git.NewCommand(ctx, "read-tree", "HEAD").
		Run(ctx.RunOpts()); err != nil {
		log.Error("git read-tree HEAD: %v\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
		return fmt.Errorf("Unable to read base branch in to the index: %w\n%s\n%s", err, ctx.outbuf.String(), ctx.errbuf.String())
	}
	ctx.outbuf.Reset()
	ctx.errbuf.Reset()

	return nil
}
+
+// getDiffTree returns a string containing all the files that were changed between headBranch and baseBranch
+// the filenames are escaped so as to fit the format required for .git/info/sparse-checkout
+func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, out io.Writer) error {
+ diffOutReader, diffOutWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to create os.Pipe for %s", repoPath)
+ return err
+ }
+ defer func() {
+ _ = diffOutReader.Close()
+ _ = diffOutWriter.Close()
+ }()
+
+ scanNullTerminatedStrings := func(data []byte, atEOF bool) (advance int, token []byte, err error) {
+ if atEOF && len(data) == 0 {
+ return 0, nil, nil
+ }
+ if i := bytes.IndexByte(data, '\x00'); i >= 0 {
+ return i + 1, data[0:i], nil
+ }
+ if atEOF {
+ return len(data), data, nil
+ }
+ return 0, nil, nil
+ }
+
+ err = git.NewCommand(ctx, "diff-tree", "--no-commit-id", "--name-only", "-r", "-r", "-z", "--root").AddDynamicArguments(baseBranch, headBranch).
+ Run(&git.RunOpts{
+ Dir: repoPath,
+ Stdout: diffOutWriter,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ // Close the writer end of the pipe to begin processing
+ _ = diffOutWriter.Close()
+ defer func() {
+ // Close the reader on return to terminate the git command if necessary
+ _ = diffOutReader.Close()
+ }()
+
+ // Now scan the output from the command
+ scanner := bufio.NewScanner(diffOutReader)
+ scanner.Split(scanNullTerminatedStrings)
+ for scanner.Scan() {
+ filepath := scanner.Text()
+ // escape '*', '?', '[', spaces and '!' prefix
+ filepath = escapedSymbols.ReplaceAllString(filepath, `\$1`)
+ // no necessary to escape the first '#' symbol because the first symbol is '/'
+ fmt.Fprintf(out, "/%s\n", filepath)
+ }
+ return scanner.Err()
+ },
+ })
+ return err
+}
+
// rebaseTrackingOnToBase checks out the tracking branch as staging and rebases it on to the base branch.
// If there is a conflict it returns a models.ErrRebaseConflicts carrying the
// SHA of the commit that failed to apply; any other failure is returned as a
// generic wrapped error.
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
	// Checkout head branch
	if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
		Run(ctx.RunOpts()); err != nil {
		return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
	}
	ctx.outbuf.Reset()
	ctx.errbuf.Reset()

	// Rebase before merging
	if err := git.NewCommand(ctx, "rebase").AddDynamicArguments(baseBranch).
		Run(ctx.RunOpts()); err != nil {
		// Rebase will leave a REBASE_HEAD file in .git if there is a conflict
		if _, statErr := os.Stat(filepath.Join(ctx.tmpBasePath, ".git", "REBASE_HEAD")); statErr == nil {
			var commitSha string
			ok := false
			// The file naming the failing commit moved between git versions;
			// try both locations.
			failingCommitPaths := []string{
				filepath.Join(ctx.tmpBasePath, ".git", "rebase-apply", "original-commit"), // Git < 2.26
				filepath.Join(ctx.tmpBasePath, ".git", "rebase-merge", "stopped-sha"),     // Git >= 2.26
			}
			for _, failingCommitPath := range failingCommitPaths {
				if _, statErr := os.Stat(failingCommitPath); statErr == nil {
					commitShaBytes, readErr := os.ReadFile(failingCommitPath)
					if readErr != nil {
						// Abandon this attempt to handle the error
						return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
					}
					commitSha = strings.TrimSpace(string(commitShaBytes))
					ok = true
					break
				}
			}
			if !ok {
				log.Error("Unable to determine failing commit sha for failing rebase in temp repo for %-v. Cannot cast as models.ErrRebaseConflicts.", ctx.pr)
				return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
			}
			log.Debug("Conflict when rebasing staging on to base in %-v at %s: %v\n%s\n%s", ctx.pr, commitSha, err, ctx.outbuf.String(), ctx.errbuf.String())
			return models.ErrRebaseConflicts{
				CommitSHA: commitSha,
				Style:     mergeStyle,
				StdOut:    ctx.outbuf.String(),
				StdErr:    ctx.errbuf.String(),
				Err:       err,
			}
		}
		return fmt.Errorf("unable to git rebase staging on to base in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
	}
	ctx.outbuf.Reset()
	ctx.errbuf.Reset()
	return nil
}
diff --git a/services/pull/merge_rebase.go b/services/pull/merge_rebase.go
new file mode 100644
index 0000000..ecf3762
--- /dev/null
+++ b/services/pull/merge_rebase.go
@@ -0,0 +1,121 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "fmt"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// getRebaseAmendMessage composes the message used to amend the final commit of
+// a rebase merge. The title and body of the commit currently at HEAD in the
+// temporary repository are exposed to the merge-message template as
+// CommitTitle and CommitBody. An empty result means "do not amend".
+func getRebaseAmendMessage(ctx *mergeContext, baseGitRepo *git.Repository) (message string, err error) {
+ // Get existing commit message (`git show -s --format=%B` prints the raw message of HEAD).
+ commitMessage, _, err := git.NewCommand(ctx, "show", "--format=%B", "-s").RunStdString(&git.RunOpts{Dir: ctx.tmpBasePath})
+ if err != nil {
+ return "", err
+ }
+
+ // The first line is the title; anything after it is the body.
+ commitTitle, commitBody, _ := strings.Cut(commitMessage, "\n")
+ extraVars := map[string]string{"CommitTitle": strings.TrimSpace(commitTitle), "CommitBody": strings.TrimSpace(commitBody)}
+
+ message, body, err := getMergeMessage(ctx, baseGitRepo, ctx.pr, repo_model.MergeStyleRebase, extraVars)
+ if err != nil || message == "" {
+ return "", err
+ }
+
+ if len(body) > 0 {
+ message = message + "\n\n" + body
+ }
+ return message, err
+}
+
+// doMergeRebaseFastForward performs a rebase merge without a merge commit: the
+// previously rebased staging branch is fast-forwarded onto base and, when the
+// merge-message template produces a message, the new HEAD commit is amended
+// with it.
+func doMergeRebaseFastForward(ctx *mergeContext) error {
+ // Remember HEAD so we can detect whether the fast-forward changed anything.
+ baseHeadSHA, err := git.GetFullCommitID(ctx, ctx.tmpBasePath, "HEAD")
+ if err != nil {
+ return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
+ }
+
+ cmd := git.NewCommand(ctx, "merge", "--ff-only").AddDynamicArguments(stagingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleRebase, cmd); err != nil {
+ log.Error("Unable to merge staging into base: %v", err)
+ return err
+ }
+
+ // Check if anything actually changed before we amend the message, fast forward can skip commits.
+ newMergeHeadSHA, err := git.GetFullCommitID(ctx, ctx.tmpBasePath, "HEAD")
+ if err != nil {
+ return fmt.Errorf("Failed to get full commit id for HEAD: %w", err)
+ }
+ if baseHeadSHA == newMergeHeadSHA {
+ return nil
+ }
+
+ // Original repo to read template from.
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, ctx.pr.BaseRepo)
+ if err != nil {
+ log.Error("Unable to get Git repo for rebase: %v", err)
+ return err
+ }
+ defer baseGitRepo.Close()
+
+ // Amend last commit message based on template, if one exists
+ newMessage, err := getRebaseAmendMessage(ctx, baseGitRepo)
+ if err != nil {
+ log.Error("Unable to get commit message for amend: %v", err)
+ return err
+ }
+
+ // An empty message means "keep the original commit message unchanged".
+ if newMessage != "" {
+ if err := git.NewCommand(ctx, "commit", "--amend").AddOptionFormat("--message=%s", newMessage).Run(&git.RunOpts{Dir: ctx.tmpBasePath}); err != nil {
+ log.Error("Unable to amend commit message: %v", err)
+ return err
+ }
+ }
+
+ return nil
+}
+
+// doMergeRebaseMergeCommit performs a rebase merge that is finished with an
+// explicit merge commit carrying the given message.
+func doMergeRebaseMergeCommit(ctx *mergeContext, message string) error {
+ // --no-ff forces a merge commit; --no-commit leaves the commit itself to
+ // commitAndSignNoAuthor (which also handles signing).
+ cmd := git.NewCommand(ctx, "merge").AddArguments("--no-ff", "--no-commit").AddDynamicArguments(stagingBranch)
+
+ if err := runMergeCommand(ctx, repo_model.MergeStyleRebaseMerge, cmd); err != nil {
+ log.Error("Unable to merge staging into base: %v", err)
+ return err
+ }
+ if err := commitAndSignNoAuthor(ctx, message); err != nil {
+ log.Error("Unable to make final commit: %v", err)
+ return err
+ }
+
+ return nil
+}
+
+// doMergeStyleRebase rebases the tracking branch on to the base branch, then
+// merges the result into base — by fast-forward for MergeStyleRebase, or with
+// a merge commit (using the given message) otherwise.
+func doMergeStyleRebase(ctx *mergeContext, mergeStyle repo_model.MergeStyle, message string) error {
+ if err := rebaseTrackingOnToBase(ctx, mergeStyle); err != nil {
+ return err
+ }
+
+ // Checkout base branch again
+ if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(baseBranch).
+ Run(ctx.RunOpts()); err != nil {
+ log.Error("git checkout base prior to merge post staging rebase %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git checkout base prior to merge post staging rebase %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ if mergeStyle == repo_model.MergeStyleRebase {
+ return doMergeRebaseFastForward(ctx)
+ }
+
+ return doMergeRebaseMergeCommit(ctx, message)
+}
diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go
new file mode 100644
index 0000000..197d810
--- /dev/null
+++ b/services/pull/merge_squash.go
@@ -0,0 +1,86 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "fmt"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+// getAuthorSignatureSquash determines the author signature to use for the
+// squash commit. It prefers a signature taken from one of the PR's commits
+// whose author email maps to the PR poster; otherwise it falls back to the
+// poster's own git signature.
+// (The previous comment named this function doMergeStyleSquash, which was wrong.)
+func getAuthorSignatureSquash(ctx *mergeContext) (*git.Signature, error) {
+ if err := ctx.pr.Issue.LoadPoster(ctx); err != nil {
+ log.Error("%-v Issue[%d].LoadPoster: %v", ctx.pr, ctx.pr.Issue.ID, err)
+ return nil, err
+ }
+
+ // Try to get an signature from the same user in one of the commits, as the
+ // poster email might be private or commits might have a different signature
+ // than the primary email address of the poster.
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpenPath(ctx, ctx.tmpBasePath)
+ if err != nil {
+ log.Error("%-v Unable to open base repository: %v", ctx.pr, err)
+ return nil, err
+ }
+ defer closer.Close()
+
+ commits, err := gitRepo.CommitsBetweenIDs(trackingBranch, "HEAD")
+ if err != nil {
+ log.Error("%-v Unable to get commits between: %s %s: %v", ctx.pr, "HEAD", trackingBranch, err)
+ return nil, err
+ }
+
+ // Look each distinct author email up only once; the first commit authored
+ // by the poster supplies the signature.
+ uniqueEmails := make(container.Set[string])
+ for _, commit := range commits {
+ if commit.Author != nil && uniqueEmails.Add(commit.Author.Email) {
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser != nil && commitUser.ID == ctx.pr.Issue.Poster.ID {
+ return commit.Author, nil
+ }
+ }
+ }
+
+ return ctx.pr.Issue.Poster.NewGitSig(), nil
+}
+
+// doMergeStyleSquash squashes the tracking branch on the current HEAD (=base)
+// and creates the squash commit, optionally GPG-signed and optionally carrying
+// Co-authored-by / Co-committed-by trailers.
+func doMergeStyleSquash(ctx *mergeContext, message string) error {
+ sig, err := getAuthorSignatureSquash(ctx)
+ if err != nil {
+ return fmt.Errorf("getAuthorSignatureSquash: %w", err)
+ }
+
+ cmdMerge := git.NewCommand(ctx, "merge", "--squash").AddDynamicArguments(trackingBranch)
+ if err := runMergeCommand(ctx, repo_model.MergeStyleSquash, cmdMerge); err != nil {
+ log.Error("%-v Unable to merge --squash tracking into base: %v", ctx.pr, err)
+ return err
+ }
+
+ if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() {
+ // add trailer crediting the original author when committer and author differ
+ message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String())
+ }
+ // NOTE(review): the single quotes in --author='%s <%s>' become part of the
+ // argument value (no shell is involved) — confirm git tolerates them here.
+ cmdCommit := git.NewCommand(ctx, "commit").
+ AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
+ AddOptionFormat("--message=%s", message)
+ if ctx.signKeyID == "" {
+ cmdCommit.AddArguments("--no-gpg-sign")
+ } else {
+ cmdCommit.AddOptionFormat("-S%s", ctx.signKeyID)
+ }
+ if err := cmdCommit.Run(ctx.RunOpts()); err != nil {
+ log.Error("git commit %-v: %v\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", ctx.pr.HeadRepo.FullName(), ctx.pr.HeadBranch, ctx.pr.BaseRepo.FullName(), ctx.pr.BaseBranch, err, ctx.outbuf.String(), ctx.errbuf.String())
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return nil
+}
diff --git a/services/pull/merge_test.go b/services/pull/merge_test.go
new file mode 100644
index 0000000..6df6f55
--- /dev/null
+++ b/services/pull/merge_test.go
@@ -0,0 +1,67 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+// Test_expandDefaultMergeMessage verifies that template expansion substitutes
+// ${...} variables and splits the result into a title (first non-empty line,
+// leading newlines stripped) and a body.
+func Test_expandDefaultMergeMessage(t *testing.T) {
+ type args struct {
+ template string
+ vars map[string]string
+ }
+ tests := []struct {
+ name string
+ args args
+ want string
+ wantBody string
+ }{
+ {
+ name: "single line",
+ args: args{
+ template: "Merge ${PullRequestTitle}",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "",
+ },
+ {
+ name: "multiple lines",
+ args: args{
+ template: "Merge ${PullRequestTitle}\nDescription:\n\n${PullRequestDescription}\n",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "Description:\n\nPull\nRequest\nDescription\n",
+ },
+ {
+ name: "leading newlines",
+ args: args{
+ template: "\n\n\nMerge ${PullRequestTitle}\n\n\nDescription:\n\n${PullRequestDescription}\n",
+ vars: map[string]string{
+ "PullRequestTitle": "PullRequestTitle",
+ "PullRequestDescription": "Pull\nRequest\nDescription\n",
+ },
+ },
+ want: "Merge PullRequestTitle",
+ wantBody: "Description:\n\nPull\nRequest\nDescription\n",
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ got, got1 := expandDefaultMergeMessage(tt.args.template, tt.args.vars)
+ assert.Equalf(t, tt.want, got, "expandDefaultMergeMessage(%v, %v)", tt.args.template, tt.args.vars)
+ assert.Equalf(t, tt.wantBody, got1, "expandDefaultMergeMessage(%v, %v)", tt.args.template, tt.args.vars)
+ })
+ }
+}
diff --git a/services/pull/patch.go b/services/pull/patch.go
new file mode 100644
index 0000000..e90b4bd
--- /dev/null
+++ b/services/pull/patch.go
@@ -0,0 +1,582 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/gobwas/glob"
+)
+
+// DownloadDiffOrPatch writes the diff or formatted patch for the pull request
+// — from its merge base to its head ref — to w. The patch and binary flags
+// are forwarded to gitRepo.GetDiffOrPatch to select the output format.
+func DownloadDiffOrPatch(ctx context.Context, pr *issues_model.PullRequest, w io.Writer, patch, binary bool) error {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository ID %d for pr #%d [%d]", pr.BaseRepoID, pr.Index, pr.ID)
+ return err
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer closer.Close()
+
+ if err := gitRepo.GetDiffOrPatch(pr.MergeBase, pr.GetGitRefName(), w, patch, binary); err != nil {
+ log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ return fmt.Errorf("Unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ }
+ return nil
+}
+
+// patchErrorSuffices are suffixes of "error:" lines on `git apply` stderr from
+// which checkConflicts extracts the conflicting file path (the portion of the
+// line preceding the suffix).
+var patchErrorSuffices = []string{
+ ": already exists in index",
+ ": patch does not apply",
+ ": already exists in working directory",
+ "unrecognized input",
+ ": No such file or directory",
+ ": does not exist in index",
+}
+
+// TestPatch will test whether a simple patch will apply. It prepares a
+// temporary repository for the PR, delegates to testPatch, and cleans up the
+// temporary repository afterwards.
+func TestPatch(pr *issues_model.PullRequest) error {
+ ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr))
+ defer finished()
+
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ // A missing branch is an expected condition; only log unexpected failures.
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return err
+ }
+ defer cancel()
+
+ return testPatch(ctx, prCtx, pr)
+}
+
+// testPatch updates pr.MergeBase and pr.HeadCommitID and sets pr.Status by
+// checking the prepared temporary repository (branches "base" and "tracking")
+// for conflicts and protected-file changes.
+func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullRequest) error {
+ gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
+ if err != nil {
+ return fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer gitRepo.Close()
+
+ // 1. update merge base
+ pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
+ if err != nil {
+ // Fall back to the tip of base when no merge base can be computed.
+ var err2 error
+ pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
+ if err2 != nil {
+ return fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2)
+ }
+ }
+ pr.MergeBase = strings.TrimSpace(pr.MergeBase)
+ if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + "tracking"); err != nil {
+ return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err)
+ }
+
+ // Head already contained in base: nothing to merge.
+ if pr.HeadCommitID == pr.MergeBase {
+ pr.Status = issues_model.PullRequestStatusAncestor
+ return nil
+ }
+
+ // 2. Check for conflicts
+ if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ return err
+ }
+
+ // 3. Check for protected files changes
+ // NOTE(review): %v (not %w) drops the error chain here — confirm callers
+ // do not need errors.Is/As on this path.
+ if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil {
+ return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err)
+ }
+
+ if len(pr.ChangedProtectedFiles) > 0 {
+ log.Trace("Found %d protected files changed", len(pr.ChangedProtectedFiles))
+ }
+
+ pr.Status = issues_model.PullRequestStatusMergeable
+
+ return nil
+}
+
+// errMergeConflict is returned by attemptMerge when a genuine merge conflict
+// is detected in the named file.
+type errMergeConflict struct {
+ filename string
+}
+
+// Error implements the error interface.
+func (e *errMergeConflict) Error() string {
+ return fmt.Sprintf("conflict detected at: %s", e.filename)
+}
+
+// attemptMerge tries to resolve a single unmerged file following the
+// git merge-one-file rules. stage1/stage2/stage3 correspond to the
+// `git ls-files -u` stages (1 = common ancestor, 2 = ours, 3 = theirs).
+// Trivially resolvable files are queued on filesToRemove/filesToAdd for a
+// later bulk index update; a genuine conflict is reported as *errMergeConflict.
+func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, filesToRemove *[]string, filesToAdd *[]git.IndexObjectInfo) error {
+ log.Trace("Attempt to merge:\n%v", file)
+
+ switch {
+ case file.stage1 != nil && (file.stage2 == nil || file.stage3 == nil):
+ // 1. Deleted in one or both:
+ //
+ // Conflict <==> the stage1 !SameAs to the undeleted one
+ if (file.stage2 != nil && !file.stage1.SameAs(file.stage2)) || (file.stage3 != nil && !file.stage1.SameAs(file.stage3)) {
+ // Conflict!
+ return &errMergeConflict{file.stage1.path}
+ }
+
+ // Not a genuine conflict and we can simply remove the file from the index
+ *filesToRemove = append(*filesToRemove, file.stage1.path)
+ return nil
+ case file.stage1 == nil && file.stage2 != nil && (file.stage3 == nil || file.stage2.SameAs(file.stage3)):
+ // 2. Added in ours but not in theirs or identical in both
+ //
+ // Not a genuine conflict just add to the index
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage2.mode, Object: git.MustIDFromString(file.stage2.sha), Filename: file.stage2.path})
+ return nil
+ case file.stage1 == nil && file.stage2 != nil && file.stage3 != nil && file.stage2.sha == file.stage3.sha && file.stage2.mode != file.stage3.mode:
+ // 3. Added in both with the same sha but the modes are different
+ //
+ // Conflict! (Not sure that this can actually happen but we should handle)
+ return &errMergeConflict{file.stage2.path}
+ case file.stage1 == nil && file.stage2 == nil && file.stage3 != nil:
+ // 4. Added in theirs but not ours:
+ //
+ // Not a genuine conflict just add to the index
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage3.mode, Object: git.MustIDFromString(file.stage3.sha), Filename: file.stage3.path})
+ return nil
+ case file.stage1 == nil:
+ // 5. Created by new in both
+ //
+ // Conflict!
+ return &errMergeConflict{file.stage2.path}
+ case file.stage2 != nil && file.stage3 != nil:
+ // 6. Modified in both - we should try to merge in the changes but first:
+ //
+ if file.stage2.mode == "120000" || file.stage3.mode == "120000" {
+ // 6a. Conflicting symbolic link change
+ return &errMergeConflict{file.stage2.path}
+ }
+ if file.stage2.mode == "160000" || file.stage3.mode == "160000" {
+ // 6b. Conflicting submodule change
+ return &errMergeConflict{file.stage2.path}
+ }
+ if file.stage2.mode != file.stage3.mode {
+ // 6c. Conflicting mode change
+ return &errMergeConflict{file.stage2.path}
+ }
+
+ // Need to get the objects from the object db to attempt to merge
+ root, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage1.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get root object: %s at path: %s for merging. Error: %w", file.stage1.sha, file.stage1.path, err)
+ }
+ root = strings.TrimSpace(root)
+ defer func() {
+ _ = util.Remove(filepath.Join(tmpBasePath, root))
+ }()
+
+ base, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage2.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get base object: %s at path: %s for merging. Error: %w", file.stage2.sha, file.stage2.path, err)
+ }
+ base = strings.TrimSpace(filepath.Join(tmpBasePath, base))
+ defer func() {
+ _ = util.Remove(base)
+ }()
+ head, _, err := git.NewCommand(ctx, "unpack-file").AddDynamicArguments(file.stage3.sha).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return fmt.Errorf("unable to get head object:%s at path: %s for merging. Error: %w", file.stage3.sha, file.stage3.path, err)
+ }
+ head = strings.TrimSpace(head)
+ defer func() {
+ _ = util.Remove(filepath.Join(tmpBasePath, head))
+ }()
+
+ // now git merge-file annoyingly takes a different order to the merge-tree ...
+ _, _, conflictErr := git.NewCommand(ctx, "merge-file").AddDynamicArguments(base, root, head).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if conflictErr != nil {
+ return &errMergeConflict{file.stage2.path}
+ }
+
+ // base now contains the merged data
+ hash, _, err := git.NewCommand(ctx, "hash-object", "-w", "--path").AddDynamicArguments(file.stage2.path, base).RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return err
+ }
+ hash = strings.TrimSpace(hash)
+ *filesToAdd = append(*filesToAdd, git.IndexObjectInfo{Mode: file.stage2.mode, Object: git.MustIDFromString(hash), Filename: file.stage2.path})
+ return nil
+ default:
+ // Should be unreachable; report a conflict on whichever stage exists.
+ if file.stage1 != nil {
+ return &errMergeConflict{file.stage1.path}
+ } else if file.stage2 != nil {
+ return &errMergeConflict{file.stage2.path}
+ } else if file.stage3 != nil {
+ return &errMergeConflict{file.stage3.path}
+ }
+ }
+ return nil
+}
+
+// AttemptThreeWayMerge will attempt to three way merge using git read-tree and
+// then follow the git merge-one-file algorithm to attempt to resolve basic
+// conflicts. It returns whether a conflict was found, up to 10 conflicted file
+// paths, and any error. description is used only for trace logging.
+func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repository, base, ours, theirs, description string) (bool, []string, error) {
+ ctx, cancel := context.WithCancel(ctx)
+ defer cancel()
+
+ // First we use read-tree to do a simple three-way merge
+ if _, _, err := git.NewCommand(ctx, "read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitPath}); err != nil {
+ log.Error("Unable to run read-tree -m! Error: %v", err)
+ return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err)
+ }
+
+ var filesToRemove []string
+ var filesToAdd []git.IndexObjectInfo
+
+ // Then we use git ls-files -u to list the unmerged files and collate the triples in unmergedfiles
+ unmerged := make(chan *unmergedFile)
+ go unmergedFiles(ctx, gitPath, unmerged)
+
+ // Drain the channel on return so the producer goroutine can terminate.
+ defer func() {
+ cancel()
+ for range unmerged {
+ // empty the unmerged channel
+ }
+ }()
+
+ numberOfConflicts := 0
+ conflict := false
+ conflictedFiles := make([]string, 0, 5)
+
+ for file := range unmerged {
+ if file == nil {
+ break
+ }
+ if file.err != nil {
+ cancel()
+ return false, nil, file.err
+ }
+
+ // OK now we have the unmerged file triplet attempt to merge it
+ if err := attemptMerge(ctx, file, gitPath, &filesToRemove, &filesToAdd); err != nil {
+ if conflictErr, ok := err.(*errMergeConflict); ok {
+ log.Trace("Conflict: %s in %s", conflictErr.filename, description)
+ conflict = true
+ // only record the first 10 conflicted files for reporting
+ if numberOfConflicts < 10 {
+ conflictedFiles = append(conflictedFiles, conflictErr.filename)
+ }
+ numberOfConflicts++
+ continue
+ }
+ return false, nil, err
+ }
+ }
+
+ // Add and remove files in one command, as this is slow with many files otherwise
+ if err := gitRepo.RemoveFilesFromIndex(filesToRemove...); err != nil {
+ return false, nil, err
+ }
+ if err := gitRepo.AddObjectsToIndex(filesToAdd...); err != nil {
+ return false, nil, err
+ }
+
+ return conflict, conflictedFiles, nil
+}
+
+// checkConflicts determines whether the PR merges cleanly and updates
+// pr.Status and pr.ConflictedFiles accordingly. It first attempts a cheap
+// three-way read-tree merge; when that reports conflicts and
+// TestConflictingPatchesWithGitApply is enabled, it double-checks with
+// `git apply --check` on a real patch. Returns true when the PR is conflicted.
+func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) {
+ // 1. checkConflicts resets the conflict status - therefore - reset the conflict status
+ pr.ConflictedFiles = nil
+
+ // 2. AttemptThreeWayMerge first - this is much quicker than plain patch to base
+ description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index)
+ conflict, conflictFiles, err := AttemptThreeWayMerge(ctx,
+ tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description)
+ if err != nil {
+ return false, err
+ }
+
+ if !conflict {
+ // No conflicts detected so we need to check if the patch is empty...
+ // a. Write the newly merged tree and check the new tree-hash
+ var treeHash string
+ treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ lsfiles, _, _ := git.NewCommand(ctx, "ls-files", "-u").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles)
+ }
+ treeHash = strings.TrimSpace(treeHash)
+ baseTree, err := gitRepo.GetTree("base")
+ if err != nil {
+ return false, err
+ }
+
+ // b. compare the new tree-hash with the base tree hash
+ if treeHash == baseTree.ID.String() {
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ }
+
+ return false, nil
+ }
+
+ // 3. OK the three-way merge method has detected conflicts
+ // 3a. Are still testing with GitApply? If not set the conflict status and move on
+ if !setting.Repository.PullRequest.TestConflictingPatchesWithGitApply {
+ pr.Status = issues_model.PullRequestStatusConflict
+ pr.ConflictedFiles = conflictFiles
+
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+ return true, nil
+ }
+
+ // 3b. Create a plain patch from head to base
+ tmpPatchFile, err := os.CreateTemp("", "patch")
+ if err != nil {
+ log.Error("Unable to create temporary patch file! Error: %v", err)
+ return false, fmt.Errorf("unable to create temporary patch file! Error: %w", err)
+ }
+ defer func() {
+ _ = util.Remove(tmpPatchFile.Name())
+ }()
+
+ if err := gitRepo.GetDiffBinary(pr.MergeBase, "tracking", tmpPatchFile); err != nil {
+ tmpPatchFile.Close()
+ log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ return false, fmt.Errorf("unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
+ }
+ stat, err := tmpPatchFile.Stat()
+ if err != nil {
+ tmpPatchFile.Close()
+ return false, fmt.Errorf("unable to stat patch file: %w", err)
+ }
+ patchPath := tmpPatchFile.Name()
+ tmpPatchFile.Close()
+
+ // 3c. if the size of that patch is 0 - there can be no conflicts!
+ if stat.Size() == 0 {
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ return false, nil
+ }
+
+ log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath)
+
+ // 4. Read the base branch in to the index of the temporary repository
+ // NOTE(review): uses gitRepo.Ctx rather than the ctx parameter — confirm intentional.
+ _, _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ if err != nil {
+ return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err)
+ }
+
+ // 5. Now get the pull request configuration to check if we need to ignore whitespace
+ prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ return false, err
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ // 6. Prepare the arguments to apply the patch against the index
+ cmdApply := git.NewCommand(gitRepo.Ctx, "apply", "--check", "--cached")
+ if prConfig.IgnoreWhitespaceConflicts {
+ cmdApply.AddArguments("--ignore-whitespace")
+ }
+ is3way := false
+ if git.CheckGitVersionAtLeast("2.32.0") == nil {
+ cmdApply.AddArguments("--3way")
+ is3way = true
+ }
+ cmdApply.AddDynamicArguments(patchPath)
+
+ // 7. Prep the pipe:
+ // - Here we could do the equivalent of:
+ // `git apply --check --cached patch_file > conflicts`
+ // Then iterate through the conflicts. However, that means storing all the conflicts
+ // in memory - which is very wasteful.
+ // - alternatively we can do the equivalent of:
+ // `git apply --check ... | grep ...`
+ // meaning we don't store all of the conflicts unnecessarily.
+ stderrReader, stderrWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to open stderr pipe: %v", err)
+ return false, fmt.Errorf("unable to open stderr pipe: %w", err)
+ }
+ defer func() {
+ _ = stderrReader.Close()
+ _ = stderrWriter.Close()
+ }()
+
+ // 8. Run the check command
+ conflict = false
+ err = cmdApply.Run(&git.RunOpts{
+ Dir: tmpBasePath,
+ Stderr: stderrWriter,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ // Close the writer end of the pipe to begin processing
+ _ = stderrWriter.Close()
+ defer func() {
+ // Close the reader on return to terminate the git command if necessary
+ _ = stderrReader.Close()
+ }()
+
+ const prefix = "error: patch failed:"
+ const errorPrefix = "error: "
+ const threewayFailed = "Failed to perform three-way merge..."
+ const appliedPatchPrefix = "Applied patch to '"
+ const withConflicts = "' with conflicts."
+
+ conflicts := make(container.Set[string])
+
+ // Now scan the output from the command
+ // NOTE(review): the local name `filepath` below shadows the imported
+ // path/filepath package within this closure.
+ scanner := bufio.NewScanner(stderrReader)
+ for scanner.Scan() {
+ line := scanner.Text()
+ log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line)
+ if strings.HasPrefix(line, prefix) {
+ conflict = true
+ filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0])
+ conflicts.Add(filepath)
+ } else if is3way && line == threewayFailed {
+ conflict = true
+ } else if strings.HasPrefix(line, errorPrefix) {
+ conflict = true
+ for _, suffix := range patchErrorSuffices {
+ if strings.HasSuffix(line, suffix) {
+ filepath := strings.TrimSpace(strings.TrimSuffix(line[len(errorPrefix):], suffix))
+ if filepath != "" {
+ conflicts.Add(filepath)
+ }
+ break
+ }
+ }
+ } else if is3way && strings.HasPrefix(line, appliedPatchPrefix) && strings.HasSuffix(line, withConflicts) {
+ conflict = true
+ filepath := strings.TrimPrefix(strings.TrimSuffix(line, withConflicts), appliedPatchPrefix)
+ if filepath != "" {
+ conflicts.Add(filepath)
+ }
+ }
+ // only list 10 conflicted files
+ if len(conflicts) >= 10 {
+ break
+ }
+ }
+
+ if len(conflicts) > 0 {
+ pr.ConflictedFiles = make([]string, 0, len(conflicts))
+ for key := range conflicts {
+ pr.ConflictedFiles = append(pr.ConflictedFiles, key)
+ }
+ }
+
+ return nil
+ },
+ })
+
+ // 9. Check if the found conflictedfiles is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts.
+ // Note: `"err" could be non-nil` is due that if enable 3-way merge, it doesn't return any error on found conflicts.
+ if len(pr.ConflictedFiles) > 0 {
+ if conflict {
+ pr.Status = issues_model.PullRequestStatusConflict
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+
+ return true, nil
+ }
+ } else if err != nil {
+ return false, fmt.Errorf("git apply --check: %w", err)
+ }
+ return false, nil
+}
+
+// CheckFileProtection returns up to limit affected files (lowercased) between
+// the two commits that match one of the protection patterns. When at least one
+// file matches, the returned error is ErrFilePathProtected for the first match
+// — note the matched files are still returned alongside that error.
+func CheckFileProtection(repo *git.Repository, oldCommitID, newCommitID string, patterns []glob.Glob, limit int, env []string) ([]string, error) {
+ if len(patterns) == 0 {
+ return nil, nil
+ }
+ affectedFiles, err := git.GetAffectedFiles(repo, oldCommitID, newCommitID, env)
+ if err != nil {
+ return nil, err
+ }
+ changedProtectedFiles := make([]string, 0, limit)
+ for _, affectedFile := range affectedFiles {
+ // patterns are matched against the lowercased path
+ lpath := strings.ToLower(affectedFile)
+ for _, pat := range patterns {
+ if pat.Match(lpath) {
+ changedProtectedFiles = append(changedProtectedFiles, lpath)
+ break
+ }
+ }
+ if len(changedProtectedFiles) >= limit {
+ break
+ }
+ }
+ if len(changedProtectedFiles) > 0 {
+ err = models.ErrFilePathProtected{
+ Path: changedProtectedFiles[0],
+ }
+ }
+ return changedProtectedFiles, err
+}
+
+// CheckUnprotectedFiles reports whether every file affected between the two
+// commits matches at least one of the unprotected-file patterns (matching is
+// case-insensitive). With no patterns it returns false.
+func CheckUnprotectedFiles(repo *git.Repository, oldCommitID, newCommitID string, patterns []glob.Glob, env []string) (bool, error) {
+ if len(patterns) == 0 {
+ return false, nil
+ }
+ affectedFiles, err := git.GetAffectedFiles(repo, oldCommitID, newCommitID, env)
+ if err != nil {
+ return false, err
+ }
+ for _, affectedFile := range affectedFiles {
+ lpath := strings.ToLower(affectedFile)
+ unprotected := false
+ for _, pat := range patterns {
+ if pat.Match(lpath) {
+ unprotected = true
+ break
+ }
+ }
+ // a single file outside the patterns makes the whole change protected
+ if !unprotected {
+ return false, nil
+ }
+ }
+ return true, nil
+}
+
+// checkPullFilesProtection checks whether the PR changes protected files and
+// stores the result (up to 10 paths) in pr.ChangedProtectedFiles. An empty PR
+// or a base branch without a protection rule clears the list. The expected
+// ErrFilePathProtected from CheckFileProtection is swallowed — only the list
+// matters here.
+func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error {
+ if pr.Status == issues_model.PullRequestStatusEmpty {
+ pr.ChangedProtectedFiles = nil
+ return nil
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ return err
+ }
+
+ if pb == nil {
+ pr.ChangedProtectedFiles = nil
+ return nil
+ }
+
+ pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ())
+ if err != nil && !models.IsErrFilePathProtected(err) {
+ return err
+ }
+ return nil
+}
diff --git a/services/pull/patch_unmerged.go b/services/pull/patch_unmerged.go
new file mode 100644
index 0000000..c60c48d
--- /dev/null
+++ b/services/pull/patch_unmerged.go
@@ -0,0 +1,203 @@
+// Copyright 2021 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bufio"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+)
+
// lsFileLine is a quadruplet struct (+error) representing a partially parsed
// line from `git ls-files -u`: the mode, sha, stage and path of one unmerged
// index entry, or a parse/run error.
type lsFileLine struct {
	mode  string
	sha   string
	stage int
	path  string
	err   error
}

// SameAs checks if two lsFileLines are referring to the same path, sha and
// mode (ignoring stage). Nil receivers/arguments and error entries never match.
func (line *lsFileLine) SameAs(other *lsFileLine) bool {
	switch {
	case line == nil, other == nil:
		return false
	case line.err != nil, other.err != nil:
		return false
	}
	return line.mode == other.mode &&
		line.sha == other.sha &&
		line.path == other.path
}

// String provides a string representation for logging.
func (line *lsFileLine) String() string {
	if line == nil {
		return "<nil>"
	}
	if line.err == nil {
		return fmt.Sprintf("%d %s %s %s", line.stage, line.mode, line.path, line.sha)
	}
	return fmt.Sprintf("%d %s %s %s %v", line.stage, line.mode, line.path, line.sha, line.err)
}
+
+// readUnmergedLsFileLines calls git ls-files -u -z and parses the lines into mode-sha-stage-path quadruplets
+// it will push these to the provided channel closing it at the end
+func readUnmergedLsFileLines(ctx context.Context, tmpBasePath string, outputChan chan *lsFileLine) {
+ defer func() {
+ // Always close the outputChan at the end of this function
+ close(outputChan)
+ }()
+
+ lsFilesReader, lsFilesWriter, err := os.Pipe()
+ if err != nil {
+ log.Error("Unable to open stderr pipe: %v", err)
+ outputChan <- &lsFileLine{err: fmt.Errorf("unable to open stderr pipe: %w", err)}
+ return
+ }
+ defer func() {
+ _ = lsFilesWriter.Close()
+ _ = lsFilesReader.Close()
+ }()
+
+ stderr := &strings.Builder{}
+ err = git.NewCommand(ctx, "ls-files", "-u", "-z").
+ Run(&git.RunOpts{
+ Dir: tmpBasePath,
+ Stdout: lsFilesWriter,
+ Stderr: stderr,
+ PipelineFunc: func(_ context.Context, _ context.CancelFunc) error {
+ _ = lsFilesWriter.Close()
+ defer func() {
+ _ = lsFilesReader.Close()
+ }()
+ bufferedReader := bufio.NewReader(lsFilesReader)
+
+ for {
+ line, err := bufferedReader.ReadString('\000')
+ if err != nil {
+ if err == io.EOF {
+ return nil
+ }
+ return err
+ }
+ toemit := &lsFileLine{}
+
+ split := strings.SplitN(line, " ", 3)
+ if len(split) < 3 {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+ toemit.mode = split[0]
+ toemit.sha = split[1]
+
+ if len(split[2]) < 4 {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+
+ toemit.stage, err = strconv.Atoi(split[2][0:1])
+ if err != nil {
+ return fmt.Errorf("malformed line: %s", line)
+ }
+
+ toemit.path = split[2][2 : len(split[2])-1]
+ outputChan <- toemit
+ }
+ },
+ })
+ if err != nil {
+ outputChan <- &lsFileLine{err: fmt.Errorf("git ls-files -u -z: %w", git.ConcatenateError(err, stderr.String()))}
+ }
+}
+
+// unmergedFile is triple (+error) of lsFileLines split into stages 1,2 & 3.
+// (In `git ls-files -u` output, stage 1 is the merge base, stage 2 "ours"
+// and stage 3 "theirs" — see the git-ls-files documentation.)
+type unmergedFile struct {
+ stage1 *lsFileLine
+ stage2 *lsFileLine
+ stage3 *lsFileLine
+ err error
+}
+
+// String provides a string representation of an unmerged file for logging
+func (u *unmergedFile) String() string {
+ if u == nil {
+ return "<nil>"
+ }
+ if u.err != nil {
+ return fmt.Sprintf("error: %v\n%v\n%v\n%v", u.err, u.stage1, u.stage2, u.stage3)
+ }
+ return fmt.Sprintf("%v\n%v\n%v", u.stage1, u.stage2, u.stage3)
+}
+
+// unmergedFiles will collate the output from readUnmergedLsFileLines in to file triplets and send them
+// to the provided channel, closing at the end. Collation relies on ls-files
+// emitting the stages of one path in ascending order (some stages may be
+// absent); a stage that would "go backwards" starts a new triplet.
+func unmergedFiles(ctx context.Context, tmpBasePath string, unmerged chan *unmergedFile) {
+ defer func() {
+ // Always close the channel
+ close(unmerged)
+ }()
+
+ ctx, cancel := context.WithCancel(ctx)
+ lsFileLineChan := make(chan *lsFileLine, 10) // give lsFileLineChan a buffer
+ go readUnmergedLsFileLines(ctx, tmpBasePath, lsFileLineChan)
+ defer func() {
+ // Cancel the producer and drain its channel so its goroutine can exit.
+ cancel()
+ for range lsFileLineChan {
+ // empty channel
+ }
+ }()
+
+ next := &unmergedFile{}
+ for line := range lsFileLineChan {
+ log.Trace("Got line: %v Current State:\n%v", line, next)
+ if line.err != nil {
+ log.Error("Unable to run ls-files -u -z! Error: %v", line.err)
+ unmerged <- &unmergedFile{err: fmt.Errorf("unable to run ls-files -u -z! Error: %w", line.err)}
+ return
+ }
+
+ // stages are always emitted 1,2,3 but sometimes 1, 2 or 3 are dropped
+ switch line.stage {
+ case 0:
+ // Should not happen as this represents successfully merged file - we will tolerate and ignore though
+ case 1:
+ if next.stage1 != nil || next.stage2 != nil || next.stage3 != nil {
+ // We need to handle the unstaged file stage1,stage2,stage3
+ unmerged <- next
+ }
+ next = &unmergedFile{stage1: line}
+ case 2:
+ if next.stage3 != nil || next.stage2 != nil || (next.stage1 != nil && next.stage1.path != line.path) {
+ // We need to handle the unstaged file stage1,stage2,stage3
+ unmerged <- next
+ next = &unmergedFile{}
+ }
+ next.stage2 = line
+ case 3:
+ if next.stage3 != nil || (next.stage1 != nil && next.stage1.path != line.path) || (next.stage2 != nil && next.stage2.path != line.path) {
+ // We need to handle the unstaged file stage1,stage2,stage3
+ unmerged <- next
+ next = &unmergedFile{}
+ }
+ next.stage3 = line
+ default:
+ log.Error("Unexpected stage %d for path %s in run ls-files -u -z!", line.stage, line.path)
+ unmerged <- &unmergedFile{err: fmt.Errorf("unexpected stage %d for path %s in git ls-files -u -z", line.stage, line.path)}
+ return
+ }
+ }
+ // We need to handle the unstaged file stage1,stage2,stage3
+ if next.stage1 != nil || next.stage2 != nil || next.stage3 != nil {
+ unmerged <- next
+ }
+}
diff --git a/services/pull/pull.go b/services/pull/pull.go
new file mode 100644
index 0000000..6af7d8b
--- /dev/null
+++ b/services/pull/pull.go
@@ -0,0 +1,1032 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sync"
+ "code.gitea.io/gitea/modules/util"
+ gitea_context "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
+// TODO: use clustered lock (unique queue? or *abuse* cache)
+// pullWorkingPool serializes pull-request operations within this process,
+// keyed by the pull request ID (see the CheckIn/CheckOut pairs below).
+var pullWorkingPool = sync.NewExclusivePool()
+
+// NewPullRequest creates new pull request with labels for repository.
+func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *issues_model.Issue, labelIDs []int64, uuids []string, pr *issues_model.PullRequest, assigneeIDs []int64) error {
+ // Check if the doer is not blocked by the repository's owner.
+ if user_model.IsBlocked(ctx, repo.OwnerID, issue.PosterID) {
+ return user_model.ErrBlockedByUser
+ }
+
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return err
+ }
+ defer cancel()
+
+ if err := testPatch(ctx, prCtx, pr); err != nil {
+ return err
+ }
+
+ divergence, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ if err != nil {
+ return err
+ }
+ pr.CommitsAhead = divergence.Ahead
+ pr.CommitsBehind = divergence.Behind
+
+ assigneeCommentMap := make(map[int64]*issues_model.Comment)
+
+ // add first push codes comment
+ baseGitRepo, err := gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ return err
+ }
+ defer baseGitRepo.Close()
+
+ var reviewNotifers []*issue_service.ReviewRequestNotifier
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ if err := issues_model.NewPullRequest(ctx, repo, issue, labelIDs, uuids, pr); err != nil {
+ return err
+ }
+
+ for _, assigneeID := range assigneeIDs {
+ comment, err := issue_service.AddAssigneeIfNotAssigned(ctx, issue, issue.Poster, assigneeID, false)
+ if err != nil {
+ return err
+ }
+ assigneeCommentMap[assigneeID] = comment
+ }
+
+ pr.Issue = issue
+ issue.PullRequest = pr
+
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ err = PushToBaseRepo(ctx, pr)
+ } else {
+ err = UpdateRef(ctx, pr)
+ }
+ if err != nil {
+ return err
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pr.BaseRepo.RepoPath(),
+ git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName(), false, false)
+ if err != nil {
+ return err
+ }
+ if len(compareInfo.Commits) == 0 {
+ return nil
+ }
+
+ data := issues_model.PushActionContent{IsForcePush: false}
+ data.CommitIDs = make([]string, 0, len(compareInfo.Commits))
+ for i := len(compareInfo.Commits) - 1; i >= 0; i-- {
+ data.CommitIDs = append(data.CommitIDs, compareInfo.Commits[i].ID.String())
+ }
+
+ dataJSON, err := json.Marshal(data)
+ if err != nil {
+ return err
+ }
+
+ ops := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypePullRequestPush,
+ Doer: issue.Poster,
+ Repo: repo,
+ Issue: pr.Issue,
+ IsForcePush: false,
+ Content: string(dataJSON),
+ }
+
+ if _, err = issues_model.CreateComment(ctx, ops); err != nil {
+ return err
+ }
+
+ if !pr.IsWorkInProgress(ctx) {
+ reviewNotifers, err = issue_service.PullRequestCodeOwnersReview(ctx, issue, pr)
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ }); err != nil {
+ // cleanup: this will only remove the reference, the real commit will be clean up when next GC
+ if err1 := baseGitRepo.RemoveReference(pr.GetGitRefName()); err1 != nil {
+ log.Error("RemoveReference: %v", err1)
+ }
+ return err
+ }
+ baseGitRepo.Close() // close immediately to avoid notifications will open the repository again
+
+ issue_service.ReviewRequestNotify(ctx, issue, issue.Poster, reviewNotifers)
+
+ mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, issue.Poster, issue.Content)
+ if err != nil {
+ return err
+ }
+ notify_service.NewPullRequest(ctx, pr, mentions)
+ if len(issue.Labels) > 0 {
+ notify_service.IssueChangeLabels(ctx, issue.Poster, issue, issue.Labels, nil)
+ }
+ if issue.Milestone != nil {
+ notify_service.IssueChangeMilestone(ctx, issue.Poster, issue, 0)
+ }
+ for _, assigneeID := range assigneeIDs {
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ return ErrDependenciesLeft
+ }
+ notify_service.IssueChangeAssignee(ctx, issue.Poster, issue, assignee, false, assigneeCommentMap[assigneeID])
+ }
+
+ return nil
+}
+
+// ChangeTargetBranch changes the target branch of this pull request, as the given user.
+// It refuses closed/merged PRs, a no-op target, a head equal to the new base,
+// or a duplicate of an existing open PR; it then re-tests the patch, refreshes
+// divergence, persists the change and records a change-target-branch comment.
+func ChangeTargetBranch(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, targetBranch string) (err error) {
+ // Serialize with other operations on the same pull request.
+ pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))
+
+ // Current target branch is already the same
+ if pr.BaseBranch == targetBranch {
+ return nil
+ }
+
+ if pr.Issue.IsClosed {
+ return issues_model.ErrIssueIsClosed{
+ ID: pr.Issue.ID,
+ RepoID: pr.Issue.RepoID,
+ Index: pr.Issue.Index,
+ }
+ }
+
+ if pr.HasMerged {
+ return models.ErrPullRequestHasMerged{
+ ID: pr.ID,
+ IssueID: pr.Index,
+ HeadRepoID: pr.HeadRepoID,
+ BaseRepoID: pr.BaseRepoID,
+ HeadBranch: pr.HeadBranch,
+ BaseBranch: pr.BaseBranch,
+ }
+ }
+
+ // Check if branches are equal
+ branchesEqual, err := IsHeadEqualWithBranch(ctx, pr, targetBranch)
+ if err != nil {
+ return err
+ }
+ if branchesEqual {
+ return git_model.ErrBranchesEqual{
+ HeadBranchName: pr.HeadBranch,
+ BaseBranchName: targetBranch,
+ }
+ }
+
+ // Check if pull request for the new target branch already exists
+ existingPr, err := issues_model.GetUnmergedPullRequest(ctx, pr.HeadRepoID, pr.BaseRepoID, pr.HeadBranch, targetBranch, issues_model.PullRequestFlowGithub)
+ if existingPr != nil {
+ return issues_model.ErrPullRequestAlreadyExists{
+ ID: existingPr.ID,
+ IssueID: existingPr.Index,
+ HeadRepoID: existingPr.HeadRepoID,
+ BaseRepoID: existingPr.BaseRepoID,
+ HeadBranch: existingPr.HeadBranch,
+ BaseBranch: existingPr.BaseBranch,
+ }
+ }
+ // A not-exist error is the expected outcome here; anything else is fatal.
+ if err != nil && !issues_model.IsErrPullRequestNotExist(err) {
+ return err
+ }
+
+ // Set new target branch
+ oldBranch := pr.BaseBranch
+ pr.BaseBranch = targetBranch
+
+ // Refresh patch
+ if err := TestPatch(pr); err != nil {
+ return err
+ }
+
+ // Update target branch, PR diff and status
+ // This is the same as checkAndUpdateStatus in check service, but also updates base_branch
+ if pr.Status == issues_model.PullRequestStatusChecking {
+ pr.Status = issues_model.PullRequestStatusMergeable
+ }
+
+ // Update Commit Divergence
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ return err
+ }
+ pr.CommitsAhead = divergence.Ahead
+ pr.CommitsBehind = divergence.Behind
+
+ if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files", "base_branch", "commits_ahead", "commits_behind"); err != nil {
+ return err
+ }
+
+ // Create comment
+ options := &issues_model.CreateCommentOptions{
+ Type: issues_model.CommentTypeChangeTargetBranch,
+ Doer: doer,
+ Repo: pr.Issue.Repo,
+ Issue: pr.Issue,
+ OldRef: oldBranch,
+ NewRef: targetBranch,
+ }
+ if _, err = issues_model.CreateComment(ctx, options); err != nil {
+ return fmt.Errorf("CreateChangeTargetBranchComment: %w", err)
+ }
+
+ return nil
+}
+
+// checkForInvalidation invalidates code comments on the given pull requests
+// for the pushed branch. The actual work runs in a background goroutine which
+// closes gitRepo when done; its errors are only logged, never returned.
+func checkForInvalidation(ctx context.Context, requests issues_model.PullRequestList, repoID int64, doer *user_model.User, branch string) error {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ return fmt.Errorf("GetRepositoryByIDCtx: %w", err)
+ }
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return fmt.Errorf("gitrepo.OpenRepository: %w", err)
+ }
+ go func() {
+ // FIXME: graceful: We need to tell the manager we're doing something...
+ // NOTE(review): this goroutine may outlive the caller's request scope and
+ // keeps using ctx — confirm ctx cancellation cannot abort it prematurely.
+ err := InvalidateCodeComments(ctx, requests, doer, gitRepo, branch)
+ if err != nil {
+ log.Error("PullRequestList.InvalidateCodeComments: %v", err)
+ }
+ gitRepo.Close()
+ }()
+ return nil
+}
+
+// AddTestPullRequestTask adds new test tasks by given head/base repository and head/base branch,
+// and generate new patch for testing as needed.
+// The work is performed asynchronously under the graceful manager; only pull
+// requests created before timeNano are considered (see TestPullRequest).
+func AddTestPullRequestTask(ctx context.Context, doer *user_model.User, repoID int64, branch string, isSync bool, oldCommitID, newCommitID string, timeNano int64) {
+ description := fmt.Sprintf("AddTestPullRequestTask [head_repo_id: %d, head_branch: %s]: only pull requests created before nano time %d will be considered", repoID, branch, timeNano)
+ log.Trace(description)
+ go graceful.GetManager().RunWithShutdownContext(func(shutdownCtx context.Context) {
+ // make it a process to allow for cancellation (especially during integration tests where no global shutdown happens)
+ ctx, _, finished := process.GetManager().AddContext(shutdownCtx, description)
+ defer finished()
+ // There is no sensible way to shut this down ":-("
+ // If you don't let it run all the way then you will lose data
+ // TODO: graceful: TestPullRequest needs to become a queue!
+
+ TestPullRequest(ctx, doer, repoID, timeNano, branch, isSync, oldCommitID, newCommitID)
+ })
+}
+
+// TestPullRequest re-tests all open pull requests affected by a push to branch
+// in repoID: head-side PRs (GitHub flow, created before olderThan) are pushed
+// to the base repo and queued for checking; on sync pushes reviews/comments
+// are revalidated; base-side PRs get their divergence refreshed and re-queued.
+// Errors are logged, not returned.
+func TestPullRequest(ctx context.Context, doer *user_model.User, repoID, olderThan int64, branch string, isSync bool, oldCommitID, newCommitID string) {
+ // Only consider PR that are older than olderThan, which is the time at
+ // which the newCommitID was added to repoID.
+ //
+ // * commit C is pushed
+ // * the git hook queues AddTestPullRequestTask for processing and returns with success
+ // * TestPullRequest is not called yet
+ // * a pull request P with commit C as the head is created
+ // * TestPullRequest runs and ignores P because it was created after the commit was received
+ //
+ // In other words, a PR must not be updated based on events that happened before it existed
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfoMax(ctx, repoID, olderThan, branch)
+ if err != nil {
+ log.Error("Find pull requests [head_repo_id: %d, head_branch: %s]: %v", repoID, branch, err)
+ return
+ }
+
+ for _, pr := range prs {
+ log.Trace("Updating PR[id=%d,index=%d]: composing new test task", pr.ID, pr.Index)
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ if err := PushToBaseRepo(ctx, pr); err != nil {
+ log.Error("PushToBaseRepo: %v", err)
+ continue
+ }
+ } else {
+ // AGit flow PRs keep their ref updated elsewhere; nothing to push here.
+ continue
+ }
+
+ AddToTaskQueue(ctx, pr)
+ comment, err := CreatePushPullComment(ctx, doer, pr, oldCommitID, newCommitID)
+ if err == nil && comment != nil {
+ notify_service.PullRequestPushCommits(ctx, doer, pr, comment)
+ }
+ }
+
+ if isSync {
+ requests := issues_model.PullRequestList(prs)
+ if err = requests.LoadAttributes(ctx); err != nil {
+ log.Error("PullRequestList.LoadAttributes: %v", err)
+ }
+ if invalidationErr := checkForInvalidation(ctx, requests, repoID, doer, branch); invalidationErr != nil {
+ log.Error("checkForInvalidation: %v", invalidationErr)
+ }
+ if err == nil {
+ for _, pr := range prs {
+ ValidatePullRequest(ctx, pr, newCommitID, oldCommitID, doer)
+ notify_service.PullRequestSynchronized(ctx, doer, pr)
+ }
+ }
+ }
+
+ log.Trace("TestPullRequest [base_repo_id: %d, base_branch: %s]: finding pull requests", repoID, branch)
+ prs, err = issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ log.Error("Find pull requests [base_repo_id: %d, base_branch: %s]: %v", repoID, branch, err)
+ return
+ }
+ for _, pr := range prs {
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ if git_model.IsErrBranchNotExist(err) && !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ log.Warn("Cannot test PR %s/%d: head_branch %s no longer exists", pr.BaseRepo.Name, pr.IssueID, pr.HeadBranch)
+ } else {
+ log.Error("GetDiverging: %v", err)
+ }
+ } else {
+ err = pr.UpdateCommitDivergence(ctx, divergence.Ahead, divergence.Behind)
+ if err != nil {
+ log.Error("UpdateCommitDivergence: %v", err)
+ }
+ }
+ AddToTaskQueue(ctx, pr)
+ }
+}
+
+// Mark old reviews as stale if diff to mergebase has changed.
+// Dismiss all approval reviews if protected branch rule item enabled.
+// Update commit divergence.
+// All failures are logged and processing continues — this function is
+// best-effort and returns nothing.
+func ValidatePullRequest(ctx context.Context, pr *issues_model.PullRequest, newCommitID, oldCommitID string, doer *user_model.User) {
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+ // Skip entirely for empty/zero new commit IDs (e.g. branch deletion).
+ if newCommitID != "" && newCommitID != objectFormat.EmptyObjectID().String() {
+ changed, err := checkIfPRContentChanged(ctx, pr, oldCommitID, newCommitID)
+ if err != nil {
+ log.Error("checkIfPRContentChanged: %v", err)
+ }
+ if changed {
+ if err := issues_model.MarkReviewsAsStale(ctx, pr.IssueID); err != nil {
+ log.Error("MarkReviewsAsStale: %v", err)
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
+ if err != nil {
+ log.Error("GetFirstMatchProtectedBranchRule: %v", err)
+ }
+ if pb != nil && pb.DismissStaleApprovals {
+ if err := DismissApprovalReviews(ctx, doer, pr); err != nil {
+ log.Error("DismissApprovalReviews: %v", err)
+ }
+ }
+ }
+ // Reviews made against newCommitID itself are not stale.
+ if err := issues_model.MarkReviewsAsNotStale(ctx, pr.IssueID, newCommitID); err != nil {
+ log.Error("MarkReviewsAsNotStale: %v", err)
+ }
+ divergence, err := GetDiverging(ctx, pr)
+ if err != nil {
+ log.Error("GetDiverging: %v", err)
+ } else {
+ err = pr.UpdateCommitDivergence(ctx, divergence.Ahead, divergence.Behind)
+ if err != nil {
+ log.Error("UpdateCommitDivergence: %v", err)
+ }
+ }
+ }
+}
+
+// checkIfPRContentChanged checks if diff to target branch has changed by push
+// A commit can be considered to leave the PR untouched if the patch/diff with its merge base is unchanged
+func checkIfPRContentChanged(ctx context.Context, pr *issues_model.PullRequest, oldCommitID, newCommitID string) (hasChanged bool, err error) {
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ return false, err
+ }
+ defer cancel()
+
+ tmpRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
+ if err != nil {
+ return false, fmt.Errorf("OpenRepository: %w", err)
+ }
+ defer tmpRepo.Close()
+
+ // Find the merge-base
+ _, base, err := tmpRepo.GetMergeBase("", "base", "tracking")
+ if err != nil {
+ return false, fmt.Errorf("GetMergeBase: %w", err)
+ }
+
+ cmd := git.NewCommand(ctx, "diff", "--name-only", "-z").AddDynamicArguments(newCommitID, oldCommitID, base)
+ stdoutReader, stdoutWriter, err := os.Pipe()
+ if err != nil {
+ return false, fmt.Errorf("unable to open pipe for to run diff: %w", err)
+ }
+
+ stderr := new(bytes.Buffer)
+ if err := cmd.Run(&git.RunOpts{
+ Dir: prCtx.tmpBasePath,
+ Stdout: stdoutWriter,
+ Stderr: stderr,
+ PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
+ _ = stdoutWriter.Close()
+ defer func() {
+ _ = stdoutReader.Close()
+ }()
+ // IsEmptyReader yields util.ErrNotEmpty when the diff produced any
+ // output; that sentinel is translated into "content changed" below.
+ return util.IsEmptyReader(stdoutReader)
+ },
+ }); err != nil {
+ if err == util.ErrNotEmpty {
+ return true, nil
+ }
+ err = git.ConcatenateError(err, stderr.String())
+
+ log.Error("Unable to run diff on %s %s %s in tempRepo for PR[%d]%s/%s...%s/%s: Error: %v",
+ newCommitID, oldCommitID, base,
+ pr.ID, pr.BaseRepo.FullName(), pr.BaseBranch, pr.HeadRepo.FullName(), pr.HeadBranch,
+ err)
+
+ return false, fmt.Errorf("Unable to run git diff --name-only -z %s %s %s: %w", newCommitID, oldCommitID, base, err)
+ }
+
+ return false, nil
+}
+
+// PushToBaseRepo pushes commits from branches of head repository to
+// corresponding branches of base repository.
+// It delegates to pushToBaseRepoHelper with no branch prefix; the helper
+// retries with refs/heads/ on ambiguous branch names.
+// FIXME: Only push branches that are actually updates?
+func PushToBaseRepo(ctx context.Context, pr *issues_model.PullRequest) (err error) {
+ return pushToBaseRepoHelper(ctx, pr, "")
+}
+
+// pushToBaseRepoHelper force-pushes the PR head branch to the base repo's
+// refs/pull/<index>/head reference. If the short branch name is ambiguous
+// (ErrMoreThanOne) it retries once with prefixHeadBranch set to refs/heads/.
+func pushToBaseRepoHelper(ctx context.Context, pr *issues_model.PullRequest, prefixHeadBranch string) (err error) {
+ log.Trace("PushToBaseRepo[%d]: pushing commits to base repo '%s'", pr.BaseRepoID, pr.GetGitRefName())
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("Unable to load head repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+ headRepoPath := pr.HeadRepo.RepoPath()
+
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+ baseRepoPath := pr.BaseRepo.RepoPath()
+
+ if err = pr.LoadIssue(ctx); err != nil {
+ return fmt.Errorf("unable to load issue %d for pr %d: %w", pr.IssueID, pr.ID, err)
+ }
+ if err = pr.Issue.LoadPoster(ctx); err != nil {
+ return fmt.Errorf("unable to load poster %d for pr %d: %w", pr.Issue.PosterID, pr.ID, err)
+ }
+
+ gitRefName := pr.GetGitRefName()
+
+ if err := git.Push(ctx, headRepoPath, git.PushOptions{
+ Remote: baseRepoPath,
+ Branch: prefixHeadBranch + pr.HeadBranch + ":" + gitRefName,
+ Force: true,
+ // Use InternalPushingEnvironment here because we know that pre-receive and post-receive do not run on a refs/pulls/...
+ Env: repo_module.InternalPushingEnvironment(pr.Issue.Poster, pr.BaseRepo),
+ }); err != nil {
+ if git.IsErrPushOutOfDate(err) {
+ // This should not happen as we're using force!
+ log.Error("Unable to push PR head for %s#%d (%-v:%s) due to ErrPushOfDate: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
+ return err
+ } else if git.IsErrPushRejected(err) {
+ rejectErr := err.(*git.ErrPushRejected)
+ log.Info("Unable to push PR head for %s#%d (%-v:%s) due to rejection:\nStdout: %s\nStderr: %s\nError: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, rejectErr.StdOut, rejectErr.StdErr, rejectErr.Err)
+ return err
+ } else if git.IsErrMoreThanOne(err) {
+ // Ambiguous ref: retry once with an explicit refs/heads/ prefix.
+ if prefixHeadBranch != "" {
+ log.Info("Can't push with %s%s", prefixHeadBranch, pr.HeadBranch)
+ return err
+ }
+ log.Info("Retrying to push with %s%s", git.BranchPrefix, pr.HeadBranch)
+ err = pushToBaseRepoHelper(ctx, pr, git.BranchPrefix)
+ return err
+ }
+ log.Error("Unable to push PR head for %s#%d (%-v:%s) due to Error: %v", pr.BaseRepo.FullName(), pr.Index, pr.BaseRepo, gitRefName, err)
+ return fmt.Errorf("Push: %s:%s %s:%s %w", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), gitRefName, err)
+ }
+
+ return nil
+}
+
+// UpdateRef update refs/pull/id/head directly for agit flow pull request
+func UpdateRef(ctx context.Context, pr *issues_model.PullRequest) (err error) {
+ log.Trace("UpdateRef[%d]: upgate pull request ref in base repo '%s'", pr.ID, pr.GetGitRefName())
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("Unable to load base repository for PR[%d] Error: %v", pr.ID, err)
+ return err
+ }
+
+ _, _, err = git.NewCommand(ctx, "update-ref").AddDynamicArguments(pr.GetGitRefName(), pr.HeadCommitID).RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()})
+ if err != nil {
+ log.Error("Unable to update ref in base repository for PR[%d] Error: %v", pr.ID, err)
+ }
+
+ return err
+}
+
// errlist aggregates multiple errors into a single error value.
type errlist []error

// Error implements the error interface by joining the individual error
// messages with ", ". An empty (or nil) list renders as the empty string.
func (errs errlist) Error() string {
	if len(errs) == 0 {
		return ""
	}
	msgs := make([]string, len(errs))
	for i, err := range errs {
		msgs[i] = err.Error()
	}
	return strings.Join(msgs, ", ")
}
+
+// RetargetChildrenOnMerge retarget children pull requests on merge if possible
+// (only when the feature is enabled and the merged PR's head and base live in
+// the same repository).
+func RetargetChildrenOnMerge(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) error {
+ if setting.Repository.PullRequest.RetargetChildrenOnMerge && pr.BaseRepoID == pr.HeadRepoID {
+ return RetargetBranchPulls(ctx, doer, pr.HeadRepoID, pr.HeadBranch, pr.BaseBranch)
+ }
+ return nil
+}
+
+// RetargetBranchPulls change target branch for all pull requests whose base branch is the branch
+// Both branch and targetBranch must be in the same repo (for security reasons)
+// Per-PR failures are collected and returned together as an errlist; expected
+// conditions (closed, already merged, duplicate PR) are tolerated.
+func RetargetBranchPulls(ctx context.Context, doer *user_model.User, repoID int64, branch, targetBranch string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, pr := range prs {
+ if err = pr.Issue.LoadRepo(ctx); err != nil {
+ errs = append(errs, err)
+ } else if err = ChangeTargetBranch(ctx, pr, doer, targetBranch); err != nil &&
+ !issues_model.IsErrIssueIsClosed(err) && !models.IsErrPullRequestHasMerged(err) &&
+ !issues_model.IsErrPullRequestAlreadyExists(err) {
+ errs = append(errs, err)
+ }
+ }
+
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
+// CloseBranchPulls close all the pull requests who's head branch is the branch
+// (and also those whose base branch is the branch). Per-PR failures are
+// collected into an errlist; already-closed and dependencies-left errors are
+// tolerated.
+func CloseBranchPulls(ctx context.Context, doer *user_model.User, repoID int64, branch string) error {
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ prs2, err := issues_model.GetUnmergedPullRequestsByBaseInfo(ctx, repoID, branch)
+ if err != nil {
+ return err
+ }
+
+ prs = append(prs, prs2...)
+ if err := issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, pr := range prs {
+ if err = issue_service.ChangeStatus(ctx, pr.Issue, doer, "", true); err != nil && !issues_model.IsErrPullWasClosed(err) && !issues_model.IsErrDependenciesLeft(err) {
+ errs = append(errs, err)
+ }
+ }
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
+// CloseRepoBranchesPulls close all pull requests which head branches are in the given repository, but only whose base repo is not in the given repository
+func CloseRepoBranchesPulls(ctx context.Context, doer *user_model.User, repo *repo_model.Repository) error {
+ branches, _, err := gitrepo.GetBranchesByPath(ctx, repo, 0, 0)
+ if err != nil {
+ return err
+ }
+
+ var errs errlist
+ for _, branch := range branches {
+ prs, err := issues_model.GetUnmergedPullRequestsByHeadInfo(ctx, repo.ID, branch.Name)
+ if err != nil {
+ return err
+ }
+
+ if err = issues_model.PullRequestList(prs).LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ for _, pr := range prs {
+ // If the base repository for this pr is this repository there is no need to close it
+ // as it is going to be deleted anyway
+ if pr.BaseRepoID == repo.ID {
+ continue
+ }
+ // Already-closed PRs are tolerated; other failures are collected.
+ if err = issue_service.ChangeStatus(ctx, pr.Issue, doer, "", true); err != nil && !issues_model.IsErrPullWasClosed(err) {
+ errs = append(errs, err)
+ }
+ }
+ }
+
+ if len(errs) > 0 {
+ return errs
+ }
+ return nil
+}
+
+// commitMessageTrailersPattern matches a trailing git-trailer paragraph at the
+// end of a commit message: one or more "Key: value" lines, possibly continued
+// by indented lines, preceded by a blank line or the start of the message.
+var commitMessageTrailersPattern = regexp.MustCompile(`(?:^|\n\n)(?:[\w-]+[ \t]*:[^\n]+\n*(?:[ \t]+[^\n]+\n*)*)+$`)
+
+// GetSquashMergeCommitMessages returns the commit messages between head and merge base (if there is one)
+func GetSquashMergeCommitMessages(ctx context.Context, pr *issues_model.PullRequest) string {
+ if err := pr.LoadIssue(ctx); err != nil {
+ log.Error("Cannot load issue %d for PR id %d: Error: %v", pr.IssueID, pr.ID, err)
+ return ""
+ }
+
+ if err := pr.Issue.LoadPoster(ctx); err != nil {
+ log.Error("Cannot load poster %d for pr id %d, index %d Error: %v", pr.Issue.PosterID, pr.ID, pr.Index, err)
+ return ""
+ }
+
+ if pr.HeadRepo == nil {
+ var err error
+ pr.HeadRepo, err = repo_model.GetRepositoryByID(ctx, pr.HeadRepoID)
+ if err != nil {
+ log.Error("GetRepositoryByIdCtx[%d]: %v", pr.HeadRepoID, err)
+ return ""
+ }
+ }
+
+ gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
+ if err != nil {
+ log.Error("Unable to open head repository: Error: %v", err)
+ return ""
+ }
+ defer closer.Close()
+
+ var headCommit *git.Commit
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ headCommit, err = gitRepo.GetBranchCommit(pr.HeadBranch)
+ } else {
+ pr.HeadCommitID, err = gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("Unable to get head commit: %s Error: %v", pr.GetGitRefName(), err)
+ return ""
+ }
+ headCommit, err = gitRepo.GetCommit(pr.HeadCommitID)
+ }
+ if err != nil {
+ log.Error("Unable to get head commit: %s Error: %v", pr.HeadBranch, err)
+ return ""
+ }
+
+ mergeBase, err := gitRepo.GetCommit(pr.MergeBase)
+ if err != nil {
+ log.Error("Unable to get merge base commit: %s Error: %v", pr.MergeBase, err)
+ return ""
+ }
+
+ limit := setting.Repository.PullRequest.DefaultMergeMessageCommitsLimit
+
+ commits, err := gitRepo.CommitsBetweenLimit(headCommit, mergeBase, limit, 0)
+ if err != nil {
+ log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err)
+ return ""
+ }
+
+ posterSig := pr.Issue.Poster.NewGitSig().String()
+
+ uniqueAuthors := make(container.Set[string])
+ authors := make([]string, 0, len(commits))
+ stringBuilder := strings.Builder{}
+
+ if !setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages {
+ message := strings.TrimSpace(pr.Issue.Content)
+ stringBuilder.WriteString(message)
+ if stringBuilder.Len() > 0 {
+ stringBuilder.WriteRune('\n')
+ if !commitMessageTrailersPattern.MatchString(message) {
+ stringBuilder.WriteRune('\n')
+ }
+ }
+ }
+
+ // commits list is in reverse chronological order
+ first := true
+ for i := len(commits) - 1; i >= 0; i-- {
+ commit := commits[i]
+
+ if setting.Repository.PullRequest.PopulateSquashCommentWithCommitMessages {
+ maxSize := setting.Repository.PullRequest.DefaultMergeMessageSize
+ if maxSize < 0 || stringBuilder.Len() < maxSize {
+ var toWrite []byte
+ if first {
+ first = false
+ toWrite = []byte(strings.TrimPrefix(commit.CommitMessage, pr.Issue.Title))
+ } else {
+ toWrite = []byte(commit.CommitMessage)
+ }
+
+ if len(toWrite) > maxSize-stringBuilder.Len() && maxSize > -1 {
+ toWrite = append(toWrite[:maxSize-stringBuilder.Len()], "..."...)
+ }
+ if _, err := stringBuilder.Write(toWrite); err != nil {
+ log.Error("Unable to write commit message Error: %v", err)
+ return ""
+ }
+
+ if _, err := stringBuilder.WriteRune('\n'); err != nil {
+ log.Error("Unable to write commit message Error: %v", err)
+ return ""
+ }
+ }
+ }
+
+ authorString := commit.Author.String()
+ if uniqueAuthors.Add(authorString) && authorString != posterSig {
+ // Compare use account as well to avoid adding the same author multiple times
+ // times when email addresses are private or multiple emails are used.
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser == nil || commitUser.ID != pr.Issue.Poster.ID {
+ authors = append(authors, authorString)
+ }
+ }
+ }
+
+ // Consider collecting the remaining authors
+ if limit >= 0 && setting.Repository.PullRequest.DefaultMergeMessageAllAuthors {
+ skip := limit
+ limit = 30
+ for {
+ commits, err := gitRepo.CommitsBetweenLimit(headCommit, mergeBase, limit, skip)
+ if err != nil {
+ log.Error("Unable to get commits between: %s %s Error: %v", pr.HeadBranch, pr.MergeBase, err)
+ return ""
+ }
+ if len(commits) == 0 {
+ break
+ }
+ for _, commit := range commits {
+ authorString := commit.Author.String()
+ if uniqueAuthors.Add(authorString) && authorString != posterSig {
+ commitUser, _ := user_model.GetUserByEmail(ctx, commit.Author.Email)
+ if commitUser == nil || commitUser.ID != pr.Issue.Poster.ID {
+ authors = append(authors, authorString)
+ }
+ }
+ }
+ skip += limit
+ }
+ }
+
+ for _, author := range authors {
+ if _, err := stringBuilder.WriteString("Co-authored-by: "); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ if _, err := stringBuilder.WriteString(author); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ if _, err := stringBuilder.WriteRune('\n'); err != nil {
+ log.Error("Unable to write to string builder Error: %v", err)
+ return ""
+ }
+ }
+
+ return stringBuilder.String()
+}
+
+// GetIssuesLastCommitStatus returns a map of issue ID to the most recent commit's latest status
+func GetIssuesLastCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64]*git_model.CommitStatus, error) {
+ _, lastStatus, err := GetIssuesAllCommitStatus(ctx, issues)
+ return lastStatus, err
+}
+
// GetIssuesAllCommitStatus returns, for every pull-request issue in the list, all commit
// statuses of the PR's head ref plus the single combined ("latest") status.
// NOTE(review): both returned maps are keyed by issue.PullRequest.ID (see the
// assignments below), not by the issue ID the name suggests — confirm callers agree.
// Non-pull issues and pulls whose repository or statuses cannot be loaded are skipped
// with a logged error rather than failing the whole batch.
func GetIssuesAllCommitStatus(ctx context.Context, issues issues_model.IssueList) (map[int64][]*git_model.CommitStatus, map[int64]*git_model.CommitStatus, error) {
	if err := issues.LoadPullRequests(ctx); err != nil {
		return nil, nil, err
	}
	if _, err := issues.LoadRepositories(ctx); err != nil {
		return nil, nil, err
	}

	var (
		gitRepos = make(map[int64]*git.Repository)
		res      = make(map[int64][]*git_model.CommitStatus)
		lastRes  = make(map[int64]*git_model.CommitStatus)
		err      error
	)
	// Close every repository handle we opened, regardless of how we leave the loop.
	defer func() {
		for _, gitRepo := range gitRepos {
			gitRepo.Close()
		}
	}()

	for _, issue := range issues {
		if !issue.IsPull {
			continue
		}
		// Cache opened repositories by ID: multiple issues may live in the same repo.
		gitRepo, ok := gitRepos[issue.RepoID]
		if !ok {
			gitRepo, err = gitrepo.OpenRepository(ctx, issue.Repo)
			if err != nil {
				log.Error("Cannot open git repository %-v for issue #%d[%d]. Error: %v", issue.Repo, issue.Index, issue.ID, err)
				continue
			}
			gitRepos[issue.RepoID] = gitRepo
		}

		statuses, lastStatus, err := getAllCommitStatus(ctx, gitRepo, issue.PullRequest)
		if err != nil {
			log.Error("getAllCommitStatus: can't get commit statuses of pull [%d]: %v", issue.PullRequest.ID, err)
			continue
		}
		res[issue.PullRequest.ID] = statuses
		lastRes[issue.PullRequest.ID] = lastStatus
	}
	return res, lastRes, nil
}
+
+// getAllCommitStatus get pr's commit statuses.
+func getAllCommitStatus(ctx context.Context, gitRepo *git.Repository, pr *issues_model.PullRequest) (statuses []*git_model.CommitStatus, lastStatus *git_model.CommitStatus, err error) {
+ sha, shaErr := gitRepo.GetRefCommitID(pr.GetGitRefName())
+ if shaErr != nil {
+ return nil, nil, shaErr
+ }
+
+ statuses, _, err = git_model.GetLatestCommitStatus(ctx, pr.BaseRepo.ID, sha, db.ListOptionsAll)
+ lastStatus = git_model.CalcCommitStatus(statuses)
+ return statuses, lastStatus, err
+}
+
// IsHeadEqualWithBranch reports whether the pull request's head commit is already an
// ancestor of (or equal to) the tip of branchName in the base repository, i.e. whether
// the branch already contains all of the PR's commits (per the final
// HasPreviousCommit check below).
func IsHeadEqualWithBranch(ctx context.Context, pr *issues_model.PullRequest, branchName string) (bool, error) {
	var err error
	if err = pr.LoadBaseRepo(ctx); err != nil {
		return false, err
	}
	baseGitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
	if err != nil {
		return false, err
	}
	defer closer.Close()

	baseCommit, err := baseGitRepo.GetBranchCommit(branchName)
	if err != nil {
		return false, err
	}

	if err = pr.LoadHeadRepo(ctx); err != nil {
		return false, err
	}
	// Reuse the base repository handle for same-repo PRs; otherwise open the
	// (possibly forked) head repository separately. The inner `closer` deliberately
	// shadows the outer one so both handles get closed on return.
	var headGitRepo *git.Repository
	if pr.HeadRepoID == pr.BaseRepoID {
		headGitRepo = baseGitRepo
	} else {
		var closer io.Closer

		headGitRepo, closer, err = gitrepo.RepositoryFromContextOrOpen(ctx, pr.HeadRepo)
		if err != nil {
			return false, err
		}
		defer closer.Close()
	}

	// Resolve the PR head commit: GitHub-flow PRs use the head branch tip in the head
	// repository; other flows (AGit) use the PR's ref stored in the base repository.
	var headCommit *git.Commit
	if pr.Flow == issues_model.PullRequestFlowGithub {
		headCommit, err = headGitRepo.GetBranchCommit(pr.HeadBranch)
		if err != nil {
			return false, err
		}
	} else {
		pr.HeadCommitID, err = baseGitRepo.GetRefCommitID(pr.GetGitRefName())
		if err != nil {
			return false, err
		}
		if headCommit, err = baseGitRepo.GetCommit(pr.HeadCommitID); err != nil {
			return false, err
		}
	}
	return baseCommit.HasPreviousCommit(headCommit.ID)
}
+
// CommitInfo is the JSON-serializable summary of a single commit as listed on a
// pull-request's commits view (see GetPullCommits for how it is populated).
type CommitInfo struct {
	Summary               string `json:"summary"`                  // commit.Summary() of the commit message
	CommitterOrAuthorName string `json:"committer_or_author_name"` // author name, falling back to committer name
	ID                    string `json:"id"`                       // full commit hash
	ShortSha              string `json:"short_sha"`                // abbreviated hash via base.ShortSha
	Time                  string `json:"time"`                     // commit time, RFC3339-formatted
}
+
// GetPullCommits returns all commits on given pull request and the last review commit sha.
// Attention: The last review commit sha must be from the latest review whose commit id is not empty.
// So the type of the latest review cannot be "ReviewTypeRequest".
func GetPullCommits(ctx *gitea_context.Context, issue *issues_model.Issue) ([]CommitInfo, string, error) {
	pull := issue.PullRequest

	baseGitRepo := ctx.Repo.GitRepo

	if err := pull.LoadBaseRepo(ctx); err != nil {
		return nil, "", err
	}
	// For merged PRs compare against the recorded merge base instead of the base
	// branch, since the base branch may have moved on after the merge.
	baseBranch := pull.BaseBranch
	if pull.HasMerged {
		baseBranch = pull.MergeBase
	}
	prInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(), baseBranch, pull.GetGitRefName(), true, false)
	if err != nil {
		return nil, "", err
	}

	commits := make([]CommitInfo, 0, len(prInfo.Commits))

	for _, commit := range prInfo.Commits {
		// Prefer author name/date; fall back to committer when author info is absent.
		var committerOrAuthorName string
		var commitTime time.Time
		if commit.Author != nil {
			committerOrAuthorName = commit.Author.Name
			commitTime = commit.Author.When
		} else {
			committerOrAuthorName = commit.Committer.Name
			commitTime = commit.Committer.When
		}

		commits = append(commits, CommitInfo{
			Summary:               commit.Summary(),
			CommitterOrAuthorName: committerOrAuthorName,
			ID:                    commit.ID.String(),
			ShortSha:              base.ShortSha(commit.ID.String()),
			Time:                  commitTime.Format(time.RFC3339),
		})
	}

	var lastReviewCommitID string
	if ctx.IsSigned {
		// Get the signed-in user's latest concluded review (approve/comment/reject) so
		// the UI can mark which commits were pushed since that review.
		lastreview, err := issues_model.FindLatestReviews(ctx, issues_model.FindReviewOptions{
			IssueID:    issue.ID,
			ReviewerID: ctx.Doer.ID,
			Types: []issues_model.ReviewType{
				issues_model.ReviewTypeApprove,
				issues_model.ReviewTypeComment,
				issues_model.ReviewTypeReject,
			},
		})

		// A missing review is not an error for this endpoint.
		if err != nil && !issues_model.IsErrReviewNotExist(err) {
			return nil, "", err
		}
		if len(lastreview) > 0 {
			lastReviewCommitID = lastreview[0].CommitID
		}
	}

	return commits, lastReviewCommitID, nil
}
diff --git a/services/pull/pull_test.go b/services/pull/pull_test.go
new file mode 100644
index 0000000..c51619e
--- /dev/null
+++ b/services/pull/pull_test.go
@@ -0,0 +1,94 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+// TODO TestPullRequest_PushToBaseRepo
+
+func TestPullRequest_CommitMessageTrailersPattern(t *testing.T) {
+ // Not a valid trailer section
+ assert.False(t, commitMessageTrailersPattern.MatchString(""))
+ assert.False(t, commitMessageTrailersPattern.MatchString("No trailer."))
+ assert.False(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>\nNot a trailer due to following text."))
+ assert.False(t, commitMessageTrailersPattern.MatchString("Message body not correctly separated from trailer section by empty line.\nSigned-off-by: Bob <bob@example.com>"))
+ // Valid trailer section
+ assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Signed-off-by: Bob <bob@example.com>\nOther-Trailer: Value"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Message body correctly separated from trailer section by empty line.\n\nSigned-off-by: Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Multiple trailers.\n\nSigned-off-by: Bob <bob@example.com>\nOther-Trailer: Value"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Newline after trailer section.\n\nSigned-off-by: Bob <bob@example.com>\n"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("No space after colon is accepted.\n\nSigned-off-by:Bob <bob@example.com>"))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Additional whitespace is accepted.\n\nSigned-off-by \t : \tBob <bob@example.com> "))
+ assert.True(t, commitMessageTrailersPattern.MatchString("Folded value.\n\nFolded-trailer: This is\n a folded\n trailer value\nOther-Trailer: Value"))
+}
+
// TestPullRequest_GetDefaultMergeMessage_InternalTracker verifies the default merge
// message for a repo using the built-in issue tracker: same-repo PRs reference the bare
// branch name, cross-repo PRs qualify the head branch with "owner/repo:".
func TestPullRequest_GetDefaultMergeMessage_InternalTracker(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())
	pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2})

	require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
	gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, pr.BaseRepo)
	require.NoError(t, err)
	defer gitRepo.Close()

	mergeMessage, body, err := GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
	require.NoError(t, err)
	assert.Equal(t, "Merge pull request 'issue3' (#3) from branch2 into master", mergeMessage)
	assert.Equal(t, "Reviewed-on: https://try.gitea.io/user2/repo1/pulls/3\n", body)

	// Simulate a cross-repository PR: the head branch must now be repo-qualified.
	pr.BaseRepoID = 1
	pr.HeadRepoID = 2
	mergeMessage, _, err = GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
	require.NoError(t, err)
	assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo1:branch2 into master", mergeMessage)
}
+
// TestPullRequest_GetDefaultMergeMessage_ExternalTracker verifies that a repository
// configured with an external issue tracker uses "!" (merge-request style) references
// for same-repo PRs, while cross-repo PRs (with freshly reloaded repo units) fall back
// to "#" references.
func TestPullRequest_GetDefaultMergeMessage_ExternalTracker(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())

	externalTracker := repo_model.RepoUnit{
		Type: unit.TypeExternalTracker,
		Config: &repo_model.ExternalTrackerConfig{
			ExternalTrackerFormat: "https://someurl.com/{user}/{repo}/{issue}",
		},
	}
	baseRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
	baseRepo.Units = []*repo_model.RepoUnit{&externalTracker}

	pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 2, BaseRepo: baseRepo})

	require.NoError(t, pr.LoadBaseRepo(db.DefaultContext))
	gitRepo, err := gitrepo.OpenRepository(git.DefaultContext, pr.BaseRepo)
	require.NoError(t, err)
	defer gitRepo.Close()

	mergeMessage, _, err := GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
	require.NoError(t, err)

	assert.Equal(t, "Merge pull request 'issue3' (!3) from branch2 into master", mergeMessage)

	// Clearing the cached repos forces them to be reloaded from the fixture database,
	// which does not carry the external tracker unit configured above.
	pr.BaseRepoID = 1
	pr.HeadRepoID = 2
	pr.BaseRepo = nil
	pr.HeadRepo = nil
	mergeMessage, _, err = GetDefaultMergeMessage(db.DefaultContext, gitRepo, pr, "")
	require.NoError(t, err)

	assert.Equal(t, "Merge pull request 'issue3' (#3) from user2/repo2:branch2 into master", mergeMessage)
}
diff --git a/services/pull/review.go b/services/pull/review.go
new file mode 100644
index 0000000..927c431
--- /dev/null
+++ b/services/pull/review.go
@@ -0,0 +1,465 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ notify_service "code.gitea.io/gitea/services/notify"
+)
+
// notEnoughLines matches the git error produced when a blamed line number exceeds the
// file's current length (e.g. "fatal: file foo has only 5 lines"); used to detect code
// comments that reference lines which no longer exist.
var notEnoughLines = regexp.MustCompile(`fatal: file .* has only \d+ lines?`)
+
// ErrDismissRequestOnClosedPR represents an error when a user tries to dismiss a review associated to a closed or merged PR.
type ErrDismissRequestOnClosedPR struct{}

// IsErrDismissRequestOnClosedPR checks if an error is an ErrDismissRequestOnClosedPR.
// Note: this uses a direct type assertion, so it does not see wrapped errors.
func IsErrDismissRequestOnClosedPR(err error) bool {
	_, ok := err.(ErrDismissRequestOnClosedPR)
	return ok
}

// Error implements the error interface.
func (err ErrDismissRequestOnClosedPR) Error() string {
	return "can't dismiss a review associated to a closed or merged PR"
}

// Unwrap makes the error match util.ErrPermissionDenied via errors.Is.
func (err ErrDismissRequestOnClosedPR) Unwrap() error {
	return util.ErrPermissionDenied
}
+
+// checkInvalidation checks if the line of code comment got changed by another commit.
+// If the line got changed the comment is going to be invalidated.
+func checkInvalidation(ctx context.Context, c *issues_model.Comment, repo *git.Repository, branch string) error {
+ // FIXME differentiate between previous and proposed line
+ commit, err := repo.LineBlame(branch, repo.Path, c.TreePath, uint(c.UnsignedLine()))
+ if err != nil && (strings.Contains(err.Error(), "fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ c.Invalidated = true
+ return issues_model.UpdateCommentInvalidate(ctx, c)
+ }
+ if err != nil {
+ return err
+ }
+ if c.CommitSHA != "" && c.CommitSHA != commit.ID.String() {
+ c.Invalidated = true
+ return issues_model.UpdateCommentInvalidate(ctx, c)
+ }
+ return nil
+}
+
+// InvalidateCodeComments will lookup the prs for code comments which got invalidated by change
+func InvalidateCodeComments(ctx context.Context, prs issues_model.PullRequestList, doer *user_model.User, repo *git.Repository, branch string) error {
+ if len(prs) == 0 {
+ return nil
+ }
+ issueIDs := prs.GetIssueIDs()
+
+ codeComments, err := db.Find[issues_model.Comment](ctx, issues_model.FindCommentsOptions{
+ ListOptions: db.ListOptionsAll,
+ Type: issues_model.CommentTypeCode,
+ Invalidated: optional.Some(false),
+ IssueIDs: issueIDs,
+ })
+ if err != nil {
+ return fmt.Errorf("find code comments: %v", err)
+ }
+ for _, comment := range codeComments {
+ if err := checkInvalidation(ctx, comment, repo, branch); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
// CreateCodeComment creates a comment on the given code line. It dispatches between
// three cases: a single stand-alone comment, a comment belonging to a pending review,
// and a reply to an existing review. Replies and single comments trigger
// notifications immediately; pending-review comments notify only on review submission.
//
// NOTE(review): the sign of `line` appears to select diff side (negative = old side,
// judging by the `line < 0` use in CreateCodeCommentKnownReviewID) — confirm.
func CreateCodeComment(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, issue *issues_model.Issue, line int64, content, treePath string, pendingReview bool, replyReviewID int64, latestCommitID string, attachments []string) (*issues_model.Comment, error) {
	var (
		existsReview bool
		err          error
	)

	// CreateCodeComment() is used for:
	// - Single comments
	// - Comments that are part of a review
	// - Comments that reply to an existing review

	if !pendingReview && replyReviewID != 0 {
		// It's not part of a review; maybe a reply to a review comment or a single comment.
		// Check if there are reviews for that line already; if there are, this is a reply
		if existsReview, err = issues_model.ReviewExists(ctx, issue, treePath, line); err != nil {
			return nil, err
		}
	}

	// Comments that are replies don't require a review header to show up in the issue view
	if !pendingReview && existsReview {
		if err = issue.LoadRepo(ctx); err != nil {
			return nil, err
		}

		// Attach the reply directly to the existing review and notify right away.
		comment, err := CreateCodeCommentKnownReviewID(ctx,
			doer,
			issue.Repo,
			issue,
			content,
			treePath,
			line,
			replyReviewID,
			attachments,
		)
		if err != nil {
			return nil, err
		}

		mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, comment.Content)
		if err != nil {
			return nil, err
		}

		notify_service.CreateIssueComment(ctx, doer, issue.Repo, issue, comment, mentions)

		return comment, nil
	}

	// Find the doer's current pending review, creating one if none exists yet.
	review, err := issues_model.GetCurrentReview(ctx, doer, issue)
	if err != nil {
		if !issues_model.IsErrReviewNotExist(err) {
			return nil, err
		}

		if review, err = issues_model.CreateReview(ctx, issues_model.CreateReviewOptions{
			Type:     issues_model.ReviewTypePending,
			Reviewer: doer,
			Issue:    issue,
			Official: false,
			CommitID: latestCommitID,
		}); err != nil {
			return nil, err
		}
	}

	comment, err := CreateCodeCommentKnownReviewID(ctx,
		doer,
		issue.Repo,
		issue,
		content,
		treePath,
		line,
		review.ID,
		attachments,
	)
	if err != nil {
		return nil, err
	}

	if !pendingReview && !existsReview {
		// Submit the review we've just created so the comment shows up in the issue view
		if _, _, err = SubmitReview(ctx, doer, gitRepo, issue, issues_model.ReviewTypeComment, "", latestCommitID, nil); err != nil {
			return nil, err
		}
	}

	// NOTICE: if it's a pending review the notifications will not be fired until user submit review.

	return comment, nil
}
+
// CreateCodeCommentKnownReviewID creates a plain code comment at the specified
// line/path, attached to the review identified by reviewID (0 for none). It reuses the
// commit SHA, invalidation state and patch of the review's first comment on the same
// line when one exists; otherwise it blames the line to find the referenced commit and
// cuts a fresh diff hunk around the line for review comments.
func CreateCodeCommentKnownReviewID(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, treePath string, line, reviewID int64, attachments []string) (*issues_model.Comment, error) {
	var commitID, patch string
	if err := issue.LoadPullRequest(ctx); err != nil {
		return nil, fmt.Errorf("LoadPullRequest: %w", err)
	}
	pr := issue.PullRequest
	if err := pr.LoadBaseRepo(ctx); err != nil {
		return nil, fmt.Errorf("LoadBaseRepo: %w", err)
	}
	gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, pr.BaseRepo)
	if err != nil {
		return nil, fmt.Errorf("RepositoryFromContextOrOpen: %w", err)
	}
	defer closer.Close()

	invalidated := false
	head := pr.GetGitRefName()
	if line > 0 {
		if reviewID != 0 {
			// If this review already has a comment on the same line, inherit its commit
			// SHA, invalidation state and patch so the thread stays consistent.
			first, err := issues_model.FindComments(ctx, &issues_model.FindCommentsOptions{
				ReviewID: reviewID,
				Line:     line,
				TreePath: treePath,
				Type:     issues_model.CommentTypeCode,
				ListOptions: db.ListOptions{
					PageSize: 1,
					Page:     1,
				},
			})
			if err == nil && len(first) > 0 {
				commitID = first[0].CommitSHA
				invalidated = first[0].Invalidated
				patch = first[0].Patch
			} else if err != nil && !issues_model.IsErrCommentNotExist(err) {
				return nil, fmt.Errorf("Find first comment for %d line %d path %s. Error: %w", reviewID, line, treePath, err)
			} else {
				// No prior comment on this line: blame against the review's recorded
				// commit (if any) instead of the moving PR head ref.
				review, err := issues_model.GetReviewByID(ctx, reviewID)
				if err == nil && len(review.CommitID) > 0 {
					head = review.CommitID
				} else if err != nil && !issues_model.IsErrReviewNotExist(err) {
					return nil, fmt.Errorf("GetReviewByID %d. Error: %w", reviewID, err)
				}
			}
		}

		if len(commitID) == 0 {
			// FIXME validate treePath
			// Get latest commit referencing the commented line
			// No need for get commit for base branch changes
			commit, err := gitRepo.LineBlame(head, gitRepo.Path, treePath, uint(line))
			if err == nil {
				commitID = commit.ID.String()
			} else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
				// "no such path" / "not enough lines" are tolerated: the comment is
				// simply created without a resolved commit SHA.
				return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitRefName(), gitRepo.Path, treePath, line, err)
			}
		}
	}

	// Only fetch diff if comment is review comment
	if len(patch) == 0 && reviewID != 0 {
		headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
		if err != nil {
			return nil, fmt.Errorf("GetRefCommitID[%s]: %w", pr.GetGitRefName(), err)
		}
		if len(commitID) == 0 {
			commitID = headCommitID
		}
		// Stream the file's raw diff through a pipe and cut the hunk surrounding the
		// commented line out of it.
		reader, writer := io.Pipe()
		defer func() {
			_ = reader.Close()
			_ = writer.Close()
		}()
		go func() {
			if err := git.GetRepoRawDiffForFile(gitRepo, pr.MergeBase, headCommitID, git.RawDiffNormal, treePath, writer); err != nil {
				_ = writer.CloseWithError(fmt.Errorf("GetRawDiffForLine[%s, %s, %s, %s]: %w", gitRepo.Path, pr.MergeBase, headCommitID, treePath, err))
				return
			}
			_ = writer.Close()
		}()

		patch, err = git.CutDiffAroundLine(reader, int64((&issues_model.Comment{Line: line}).UnsignedLine()), line < 0, setting.UI.CodeCommentLines)
		if err != nil {
			log.Error("Error whilst generating patch: %v", err)
			return nil, err
		}
	}
	return issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
		Type:        issues_model.CommentTypeCode,
		Doer:        doer,
		Repo:        repo,
		Issue:       issue,
		Content:     content,
		LineNum:     line,
		TreePath:    treePath,
		CommitSHA:   commitID,
		ReviewID:    reviewID,
		Patch:       patch,
		Invalidated: invalidated,
		Attachments: attachments,
	})
}
+
// SubmitReview creates a review out of the existing pending review or creates a new one
// if no pending review exists. For approvals/rejections it additionally determines
// whether the review is "stale", i.e. made against a commit that is no longer the PR
// head. Notifications are sent for the review itself and for every code comment in it.
func SubmitReview(ctx context.Context, doer *user_model.User, gitRepo *git.Repository, issue *issues_model.Issue, reviewType issues_model.ReviewType, content, commitID string, attachmentUUIDs []string) (*issues_model.Review, *issues_model.Comment, error) {
	if err := issue.LoadPullRequest(ctx); err != nil {
		return nil, nil, err
	}

	pr := issue.PullRequest
	var stale bool
	// Staleness only matters for reviews that gate merging (approve/reject).
	if reviewType != issues_model.ReviewTypeApprove && reviewType != issues_model.ReviewTypeReject {
		stale = false
	} else {
		headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitRefName())
		if err != nil {
			return nil, nil, err
		}

		if headCommitID == commitID {
			stale = false
		} else {
			// The head moved since the review; check whether the actual PR content
			// changed (a push of identical content keeps the review fresh).
			stale, err = checkIfPRContentChanged(ctx, pr, commitID, headCommitID)
			if err != nil {
				return nil, nil, err
			}
		}
	}

	review, comm, err := issues_model.SubmitReview(ctx, doer, issue, reviewType, content, commitID, stale, attachmentUUIDs)
	if err != nil {
		return nil, nil, err
	}

	mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, comm.Content)
	if err != nil {
		return nil, nil, err
	}

	notify_service.PullRequestReview(ctx, pr, review, comm, mentions)

	// Also notify mentions inside every code comment bundled with this review.
	for _, lines := range review.CodeComments {
		for _, comments := range lines {
			for _, codeComment := range comments {
				mentions, err := issues_model.FindAndUpdateIssueMentions(ctx, issue, doer, codeComment.Content)
				if err != nil {
					return nil, nil, err
				}
				notify_service.PullRequestCodeComment(ctx, pr, codeComment, mentions)
			}
		}
	}

	return review, comm, nil
}
+
// DismissApprovalReviews dismisses all not-yet-dismissed approval reviews of the pull
// request because new commits were pushed. All dismissals plus their explanatory
// comments are written in one transaction; notifications are sent per review.
func DismissApprovalReviews(ctx context.Context, doer *user_model.User, pull *issues_model.PullRequest) error {
	reviews, err := issues_model.FindReviews(ctx, issues_model.FindReviewOptions{
		ListOptions: db.ListOptionsAll,
		IssueID:     pull.IssueID,
		Types:       []issues_model.ReviewType{issues_model.ReviewTypeApprove},
		Dismissed:   optional.Some(false),
	})
	if err != nil {
		return err
	}

	if err := reviews.LoadIssues(ctx); err != nil {
		return err
	}

	return db.WithTx(ctx, func(ctx context.Context) error {
		for _, review := range reviews {
			if err := issues_model.DismissReview(ctx, review, true); err != nil {
				return err
			}

			// Record a system comment explaining why the approval disappeared.
			comment, err := issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
				Doer:     doer,
				Content:  "New commits pushed, approval review dismissed automatically according to repository settings",
				Type:     issues_model.CommentTypeDismissReview,
				ReviewID: review.ID,
				Issue:    review.Issue,
				Repo:     review.Issue.Repo,
			})
			if err != nil {
				return err
			}

			comment.Review = review
			comment.Poster = doer
			comment.Issue = review.Issue

			notify_service.PullReviewDismiss(ctx, doer, review, comment)
		}
		return nil
	})
}
+
// DismissReview dismisses (or un-dismisses, when isDismiss is false) a stale review on
// behalf of a repo admin. Only approve/reject reviews can be dismissed, the review must
// belong to repoID, and the PR must be open (not closed or merged). When dismissPriors
// is set, all earlier non-dismissed reviews by the same reviewer are dismissed too.
// A dismissal comment (with `message`) is created and notified only when isDismiss.
func DismissReview(ctx context.Context, reviewID, repoID int64, message string, doer *user_model.User, isDismiss, dismissPriors bool) (comment *issues_model.Comment, err error) {
	review, err := issues_model.GetReviewByID(ctx, reviewID)
	if err != nil {
		return nil, err
	}

	if review.Type != issues_model.ReviewTypeApprove && review.Type != issues_model.ReviewTypeReject {
		return nil, fmt.Errorf("not need to dismiss this review because it's type is not Approve or change request")
	}

	// load data for notify
	if err := review.LoadAttributes(ctx); err != nil {
		return nil, err
	}

	// Check if the review's repoID is the one we're currently expecting.
	if review.Issue.RepoID != repoID {
		return nil, fmt.Errorf("reviews's repository is not the same as the one we expect")
	}

	issue := review.Issue

	// Dismissal is pointless (and disallowed) once the PR is closed or merged.
	if issue.IsClosed {
		return nil, ErrDismissRequestOnClosedPR{}
	}

	if issue.IsPull {
		if err := issue.LoadPullRequest(ctx); err != nil {
			return nil, err
		}
		if issue.PullRequest.HasMerged {
			return nil, ErrDismissRequestOnClosedPR{}
		}
	}

	if err := issues_model.DismissReview(ctx, review, isDismiss); err != nil {
		return nil, err
	}

	if dismissPriors {
		// Also dismiss every earlier non-dismissed review by the same reviewer.
		reviews, err := issues_model.FindReviews(ctx, issues_model.FindReviewOptions{
			IssueID:    review.IssueID,
			ReviewerID: review.ReviewerID,
			Dismissed:  optional.Some(false),
		})
		if err != nil {
			return nil, err
		}
		for _, oldReview := range reviews {
			if err = issues_model.DismissReview(ctx, oldReview, true); err != nil {
				return nil, err
			}
		}
	}

	// Un-dismissing does not create a comment or notification.
	if !isDismiss {
		return nil, nil
	}

	if err := review.Issue.LoadAttributes(ctx); err != nil {
		return nil, err
	}

	comment, err = issues_model.CreateComment(ctx, &issues_model.CreateCommentOptions{
		Doer:     doer,
		Content:  message,
		Type:     issues_model.CommentTypeDismissReview,
		ReviewID: review.ID,
		Issue:    review.Issue,
		Repo:     review.Issue.Repo,
	})
	if err != nil {
		return nil, err
	}

	comment.Review = review
	comment.Poster = doer
	comment.Issue = review.Issue

	notify_service.PullReviewDismiss(ctx, doer, review, comment)

	return comment, nil
}
diff --git a/services/pull/review_test.go b/services/pull/review_test.go
new file mode 100644
index 0000000..4cb3ad0
--- /dev/null
+++ b/services/pull/review_test.go
@@ -0,0 +1,49 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ pull_service "code.gitea.io/gitea/services/pull"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
// TestDismissReview verifies that dismissing a review is rejected with
// ErrDismissRequestOnClosedPR both when the PR's issue is closed and when the PR has
// been merged.
func TestDismissReview(t *testing.T) {
	require.NoError(t, unittest.PrepareTestDatabase())

	pull := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{})
	require.NoError(t, pull.LoadIssue(db.DefaultContext))
	issue := pull.Issue
	require.NoError(t, issue.LoadRepo(db.DefaultContext))
	reviewer := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
	review, err := issues_model.CreateReview(db.DefaultContext, issues_model.CreateReviewOptions{
		Issue:    issue,
		Reviewer: reviewer,
		Type:     issues_model.ReviewTypeReject,
	})

	require.NoError(t, err)

	// Case 1: issue closed, PR not merged.
	issue.IsClosed = true
	pull.HasMerged = false
	require.NoError(t, issues_model.UpdateIssueCols(db.DefaultContext, issue, "is_closed"))
	require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
	_, err = pull_service.DismissReview(db.DefaultContext, review.ID, issue.RepoID, "", &user_model.User{}, false, false)
	require.Error(t, err)
	assert.True(t, pull_service.IsErrDismissRequestOnClosedPR(err))

	// Case 2: issue open again, but PR merged.
	pull.HasMerged = true
	pull.Issue.IsClosed = false
	require.NoError(t, issues_model.UpdateIssueCols(db.DefaultContext, issue, "is_closed"))
	require.NoError(t, pull.UpdateCols(db.DefaultContext, "has_merged"))
	_, err = pull_service.DismissReview(db.DefaultContext, review.ID, issue.RepoID, "", &user_model.User{}, false, false)
	require.Error(t, err)
	assert.True(t, pull_service.IsErrDismissRequestOnClosedPR(err))
}
diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go
new file mode 100644
index 0000000..36bdbde
--- /dev/null
+++ b/services/pull/temp_repo.go
@@ -0,0 +1,196 @@
+// Copyright 2019 The Gitea Authors.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+)
+
// Temporary repos created here use standard branch names to help simplify
// merging code
const (
	baseBranch     = "base"     // equivalent to pr.BaseBranch
	trackingBranch = "tracking" // equivalent to pr.HeadBranch
	stagingBranch  = "staging"  // this is used for a working branch
)

// prContext carries the state needed to run git commands against a temporary
// pull-request working repository: the embedded context, the on-disk path of
// the temporary clone, the pull request being operated on, and reusable
// stdout/stderr capture buffers.
type prContext struct {
	context.Context
	tmpBasePath string
	pr          *issues_model.PullRequest
	outbuf      *strings.Builder // we keep these around to help reduce needless buffer recreation,
	errbuf      *strings.Builder // any use should be preceded by a Reset and preferably after use
}
+
+func (ctx *prContext) RunOpts() *git.RunOpts {
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return &git.RunOpts{
+ Dir: ctx.tmpBasePath,
+ Stdout: ctx.outbuf,
+ Stderr: ctx.errbuf,
+ }
+}
+
+// createTemporaryRepoForPR creates a temporary repo with "base" for pr.BaseBranch and "tracking" for pr.HeadBranch
+// it also create a second base branch called "original_base"
+func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest) (prCtx *prContext, cancel context.CancelFunc, err error) {
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ log.Error("%-v LoadHeadRepo: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v LoadHeadRepo: %w", pr, err)
+ } else if pr.HeadRepo == nil {
+ log.Error("%-v HeadRepo %d does not exist", pr, pr.HeadRepoID)
+ return nil, nil, &repo_model.ErrRepoNotExist{
+ ID: pr.HeadRepoID,
+ }
+ } else if err := pr.LoadBaseRepo(ctx); err != nil {
+ log.Error("%-v LoadBaseRepo: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v LoadBaseRepo: %w", pr, err)
+ } else if pr.BaseRepo == nil {
+ log.Error("%-v BaseRepo %d does not exist", pr, pr.BaseRepoID)
+ return nil, nil, &repo_model.ErrRepoNotExist{
+ ID: pr.BaseRepoID,
+ }
+ } else if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v HeadRepo.LoadOwner: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v HeadRepo.LoadOwner: %w", pr, err)
+ } else if err := pr.BaseRepo.LoadOwner(ctx); err != nil {
+ log.Error("%-v BaseRepo.LoadOwner: %v", pr, err)
+ return nil, nil, fmt.Errorf("%v BaseRepo.LoadOwner: %w", pr, err)
+ }
+
+ // Clone base repo.
+ tmpBasePath, err := repo_module.CreateTemporaryPath("pull")
+ if err != nil {
+ log.Error("CreateTemporaryPath[%-v]: %v", pr, err)
+ return nil, nil, err
+ }
+ prCtx = &prContext{
+ Context: ctx,
+ tmpBasePath: tmpBasePath,
+ pr: pr,
+ outbuf: &strings.Builder{},
+ errbuf: &strings.Builder{},
+ }
+ cancel = func() {
+ if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
+ log.Error("Error whilst removing removing temporary repo for %-v: %v", pr, err)
+ }
+ }
+
+ baseRepoPath := pr.BaseRepo.RepoPath()
+ headRepoPath := pr.HeadRepo.RepoPath()
+
+ if err := git.InitRepository(ctx, tmpBasePath, false, pr.BaseRepo.ObjectFormatName); err != nil {
+ log.Error("Unable to init tmpBasePath for %-v: %v", pr, err)
+ cancel()
+ return nil, nil, err
+ }
+
+ remoteRepoName := "head_repo"
+ baseBranch := "base"
+
+ fetchArgs := git.TrustedCmdArgs{"--no-tags"}
+ if git.CheckGitVersionAtLeast("2.25.0") == nil {
+ // Writing the commit graph can be slow and is not needed here
+ fetchArgs = append(fetchArgs, "--no-write-commit-graph")
+ }
+
+ // addCacheRepo adds git alternatives for the cacheRepoPath in the repoPath
+ addCacheRepo := func(repoPath, cacheRepoPath string) error {
+ p := filepath.Join(repoPath, ".git", "objects", "info", "alternates")
+ f, err := os.OpenFile(p, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0o600)
+ if err != nil {
+ log.Error("Could not create .git/objects/info/alternates file in %s: %v", repoPath, err)
+ return err
+ }
+ defer f.Close()
+ data := filepath.Join(cacheRepoPath, "objects")
+ if _, err := fmt.Fprintln(f, data); err != nil {
+ log.Error("Could not write to .git/objects/info/alternates file in %s: %v", repoPath, err)
+ return err
+ }
+ return nil
+ }
+
+ // Add head repo remote.
+ if err := addCacheRepo(tmpBasePath, baseRepoPath); err != nil {
+ log.Error("%-v Unable to add base repository to temporary repo [%s -> %s]: %v", pr, pr.BaseRepo.FullName(), tmpBasePath, err)
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add base repository to temporary repo [%s -> tmpBasePath]: %w", pr.BaseRepo.FullName(), err)
+ }
+
+ if err := git.NewCommand(ctx, "remote", "add", "-t").AddDynamicArguments(pr.BaseBranch).AddArguments("-m").AddDynamicArguments(pr.BaseBranch).AddDynamicArguments("origin", baseRepoPath).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to add base repository as origin [%s -> %s]: %v\n%s\n%s", pr, pr.BaseRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add base repository as origin [%s -> tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := git.NewCommand(ctx, "fetch", "origin").AddArguments(fetchArgs...).AddDashesAndList(pr.BaseBranch+":"+baseBranch, pr.BaseBranch+":original_"+baseBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to fetch origin base branch [%s:%s -> base, original_base in %s]: %v:\n%s\n%s", pr, pr.BaseRepo.FullName(), pr.BaseBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to fetch origin base branch [%s:%s -> base, original_base in tmpBasePath]: %w\n%s\n%s", pr.BaseRepo.FullName(), pr.BaseBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := git.NewCommand(ctx, "symbolic-ref").AddDynamicArguments("HEAD", git.BranchPrefix+baseBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to set HEAD as base branch in [%s]: %v\n%s\n%s", pr, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to set HEAD as base branch in tmpBasePath: %w\n%s\n%s", err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ if err := addCacheRepo(tmpBasePath, headRepoPath); err != nil {
+ log.Error("%-v Unable to add head repository to temporary repo [%s -> %s]: %v", pr, pr.HeadRepo.FullName(), tmpBasePath, err)
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add head base repository to temporary repo [%s -> tmpBasePath]: %w", pr.HeadRepo.FullName(), err)
+ }
+
+ if err := git.NewCommand(ctx, "remote", "add").AddDynamicArguments(remoteRepoName, headRepoPath).
+ Run(prCtx.RunOpts()); err != nil {
+ log.Error("%-v Unable to add head repository as head_repo [%s -> %s]: %v\n%s\n%s", pr, pr.HeadRepo.FullName(), tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ cancel()
+ return nil, nil, fmt.Errorf("Unable to add head repository as head_repo [%s -> tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+
+ trackingBranch := "tracking"
+ objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
+ // Fetch head branch
+ var headBranch string
+ if pr.Flow == issues_model.PullRequestFlowGithub {
+ headBranch = git.BranchPrefix + pr.HeadBranch
+ } else if len(pr.HeadCommitID) == objectFormat.FullLength() { // for not created pull request
+ headBranch = pr.HeadCommitID
+ } else {
+ headBranch = pr.GetGitRefName()
+ }
+ if err := git.NewCommand(ctx, "fetch").AddArguments(fetchArgs...).AddDynamicArguments(remoteRepoName, headBranch+":"+trackingBranch).
+ Run(prCtx.RunOpts()); err != nil {
+ cancel()
+ if !git.IsBranchExist(ctx, pr.HeadRepo.RepoPath(), pr.HeadBranch) {
+ return nil, nil, git_model.ErrBranchNotExist{
+ BranchName: pr.HeadBranch,
+ }
+ }
+ log.Error("%-v Unable to fetch head_repo head branch [%s:%s -> tracking in %s]: %v:\n%s\n%s", pr, pr.HeadRepo.FullName(), pr.HeadBranch, tmpBasePath, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ return nil, nil, fmt.Errorf("Unable to fetch head_repo head branch [%s:%s -> tracking in tmpBasePath]: %w\n%s\n%s", pr.HeadRepo.FullName(), headBranch, err, prCtx.outbuf.String(), prCtx.errbuf.String())
+ }
+ prCtx.outbuf.Reset()
+ prCtx.errbuf.Reset()
+
+ return prCtx, cancel, nil
+}
diff --git a/services/pull/update.go b/services/pull/update.go
new file mode 100644
index 0000000..dbc1b71
--- /dev/null
+++ b/services/pull/update.go
@@ -0,0 +1,180 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/repository"
+)
+
// Update updates pull request with base branch.
// When rebase is true the head branch is rebased onto the base branch;
// otherwise the base branch is merged into the head branch via a reversed
// pull request, using message as the merge commit message.
func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
	if pr.Flow == issues_model.PullRequestFlowAGit {
		// TODO: update of agit flow pull request's head branch is unsupported
		return fmt.Errorf("update of agit flow pull request's head branch is unsupported")
	}

	// Serialize concurrent operations on the same pull request.
	pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
	defer pullWorkingPool.CheckOut(fmt.Sprint(pr.ID))

	diffCount, err := GetDiverging(ctx, pr)
	if err != nil {
		return err
	} else if diffCount.Behind == 0 {
		// Nothing to do: the head already contains every base commit.
		return fmt.Errorf("HeadBranch of PR %d is up to date", pr.Index)
	}

	if rebase {
		defer func() {
			// Retrigger PR status recalculation after the rebase completes.
			AddTestPullRequestTask(ctx, doer, pr.BaseRepo.ID, pr.BaseBranch, false, "", "", 0)
		}()

		return updateHeadByRebaseOnToBase(ctx, pr, doer)
	}

	if err := pr.LoadBaseRepo(ctx); err != nil {
		log.Error("unable to load BaseRepo for %-v during update-by-merge: %v", pr, err)
		return fmt.Errorf("unable to load BaseRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
	}
	if err := pr.LoadHeadRepo(ctx); err != nil {
		log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
		return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
	}
	if pr.HeadRepo == nil {
		// LoadHeadRepo will swallow ErrRepoNotExist so if pr.HeadRepo is still nil recreate the error
		err := repo_model.ErrRepoNotExist{
			ID: pr.HeadRepoID,
		}
		log.Error("unable to load HeadRepo for PR %-v during update-by-merge: %v", pr, err)
		return fmt.Errorf("unable to load HeadRepo for PR[%d] during update-by-merge: %w", pr.ID, err)
	}

	// use merge functions but switch repos and branches
	reversePR := &issues_model.PullRequest{
		ID: pr.ID,

		HeadRepoID: pr.BaseRepoID,
		HeadRepo:   pr.BaseRepo,
		HeadBranch: pr.BaseBranch,

		BaseRepoID: pr.HeadRepoID,
		BaseRepo:   pr.HeadRepo,
		BaseBranch: pr.HeadBranch,
	}

	_, err = doMergeAndPush(ctx, reversePR, doer, repo_model.MergeStyleMerge, "", message, repository.PushTriggerPRUpdateWithBase)

	// Retrigger PR status recalculation once the merge result is returned.
	defer func() {
		AddTestPullRequestTask(ctx, doer, reversePR.HeadRepo.ID, reversePR.HeadBranch, false, "", "", 0)
	}()

	return err
}
+
// IsUserAllowedToUpdate check if user is allowed to update PR with given permissions and branch protections
//
// mergeAllowed reports whether the user may update by merging base into head;
// rebaseAllowed reports whether the repository additionally permits
// update-by-rebase. AGit-flow pull requests and nil users are never allowed.
func IsUserAllowedToUpdate(ctx context.Context, pull *issues_model.PullRequest, user *user_model.User) (mergeAllowed, rebaseAllowed bool, err error) {
	if pull.Flow == issues_model.PullRequestFlowAGit {
		return false, false, nil
	}

	if user == nil {
		return false, false, nil
	}
	// NOTE(review): pull.HeadRepo is used without an explicit LoadHeadRepo here -
	// presumably callers guarantee it is loaded; confirm against call sites.
	headRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, user)
	if err != nil {
		if repo_model.IsErrUnitTypeNotExist(err) {
			return false, false, nil
		}
		return false, false, err
	}

	if err := pull.LoadBaseRepo(ctx); err != nil {
		return false, false, err
	}

	// Updating the head from the base is modelled as a merge in the reverse
	// direction, so build a pull request with head and base swapped.
	pr := &issues_model.PullRequest{
		HeadRepoID: pull.BaseRepoID,
		HeadRepo:   pull.BaseRepo,
		BaseRepoID: pull.HeadRepoID,
		BaseRepo:   pull.HeadRepo,
		HeadBranch: pull.BaseBranch,
		BaseBranch: pull.HeadBranch,
	}

	pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pr.BaseRepoID, pr.BaseBranch)
	if err != nil {
		return false, false, err
	}

	// can't do rebase on protected branch because need force push
	if pb == nil {
		if err := pr.LoadBaseRepo(ctx); err != nil {
			return false, false, err
		}
		prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
		if err != nil {
			if repo_model.IsErrUnitTypeNotExist(err) {
				return false, false, nil
			}
			log.Error("pr.BaseRepo.GetUnit(unit.TypePullRequests): %v", err)
			return false, false, err
		}
		rebaseAllowed = prUnit.PullRequestsConfig().AllowRebaseUpdate
	}

	// Update function need push permission
	if pb != nil {
		pb.Repo = pull.BaseRepo
		if !pb.CanUserPush(ctx, user) {
			return false, false, nil
		}
	}

	baseRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.BaseRepo, user)
	if err != nil {
		return false, false, err
	}

	mergeAllowed, err = IsUserAllowedToMerge(ctx, pr, headRepoPerm, user)
	if err != nil {
		return false, false, err
	}

	// When maintainer edits are allowed, permission on the base repository is
	// also sufficient to perform the update.
	if pull.AllowMaintainerEdit {
		mergeAllowedMaintainer, err := IsUserAllowedToMerge(ctx, pr, baseRepoPerm, user)
		if err != nil {
			return false, false, err
		}

		mergeAllowed = mergeAllowed || mergeAllowedMaintainer
	}

	return mergeAllowed, rebaseAllowed, nil
}
+
+// GetDiverging determines how many commits a PR is ahead or behind the PR base branch
+func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.DivergeObject, error) {
+ log.Trace("GetDiverging[%-v]: compare commits", pr)
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ if !git_model.IsErrBranchNotExist(err) {
+ log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
+ }
+ return nil, err
+ }
+ defer cancel()
+
+ diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ return &diff, err
+}
diff --git a/services/pull/update_rebase.go b/services/pull/update_rebase.go
new file mode 100644
index 0000000..3e2a7be
--- /dev/null
+++ b/services/pull/update_rebase.go
@@ -0,0 +1,107 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package pull
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+)
+
// updateHeadByRebaseOnToBase handles updating a PR's head branch by rebasing it on the PR current base branch
// and force-pushing the result back to the head repository.
func updateHeadByRebaseOnToBase(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User) error {
	// "Clone" base repo and add the cache headers for the head repo and branch
	mergeCtx, cancel, err := createTemporaryRepoForMerge(ctx, pr, doer, "")
	if err != nil {
		return err
	}
	defer cancel()

	// Determine the old merge-base before the rebase - we use this for LFS push later on
	oldMergeBase, _, _ := git.NewCommand(ctx, "merge-base").AddDashesAndList(baseBranch, trackingBranch).RunStdString(&git.RunOpts{Dir: mergeCtx.tmpBasePath})
	oldMergeBase = strings.TrimSpace(oldMergeBase)

	// Rebase the tracking branch on to the base as the staging branch
	if err := rebaseTrackingOnToBase(mergeCtx, repo_model.MergeStyleRebaseUpdate); err != nil {
		return err
	}

	if setting.LFS.StartServer {
		// Now we need to ensure that the head repository contains any LFS objects between the new base and the old mergebase
		// It's questionable about where this should go - either after or before the push
		// I think in the interests of data safety - failures to push to the lfs should prevent
		// the push as you can always re-rebase.
		// Head/base repo IDs are deliberately swapped: objects flow from the
		// base repository into the head repository here.
		if err := LFSPush(ctx, mergeCtx.tmpBasePath, baseBranch, oldMergeBase, &issues_model.PullRequest{
			HeadRepoID: pr.BaseRepoID,
			BaseRepoID: pr.HeadRepoID,
		}); err != nil {
			log.Error("Unable to push lfs objects between %s and %s up to head branch in %-v: %v", baseBranch, oldMergeBase, pr, err)
			return err
		}
	}

	// Now determine who the pushing author should be
	var headUser *user_model.User
	if err := pr.HeadRepo.LoadOwner(ctx); err != nil {
		if !user_model.IsErrUserNotExist(err) {
			log.Error("Can't find user: %d for head repository in %-v - %v", pr.HeadRepo.OwnerID, pr, err)
			return err
		}
		// Owner account no longer exists: fall back to the doer as pushing user.
		log.Error("Can't find user: %d for head repository in %-v - defaulting to doer: %-v - %v", pr.HeadRepo.OwnerID, pr, doer, err)
		headUser = doer
	} else {
		headUser = pr.HeadRepo.Owner
	}

	// Force push is required because the rebase rewrites head-branch history.
	pushCmd := git.NewCommand(ctx, "push", "-f", "head_repo").
		AddDynamicArguments(stagingBranch + ":" + git.BranchPrefix + pr.HeadBranch)

	// Push back to the head repository.
	// TODO: this causes an api call to "/api/internal/hook/post-receive/...",
	// that prevents us from doing the whole merge in one db transaction
	mergeCtx.outbuf.Reset()
	mergeCtx.errbuf.Reset()

	if err := pushCmd.Run(&git.RunOpts{
		Env: repo_module.FullPushingEnvironment(
			headUser,
			doer,
			pr.HeadRepo,
			pr.HeadRepo.Name,
			pr.ID,
		),
		Dir:    mergeCtx.tmpBasePath,
		Stdout: mergeCtx.outbuf,
		Stderr: mergeCtx.errbuf,
	}); err != nil {
		// Translate well-known git push failures into typed errors callers handle.
		if strings.Contains(mergeCtx.errbuf.String(), "non-fast-forward") {
			return &git.ErrPushOutOfDate{
				StdOut: mergeCtx.outbuf.String(),
				StdErr: mergeCtx.errbuf.String(),
				Err:    err,
			}
		} else if strings.Contains(mergeCtx.errbuf.String(), "! [remote rejected]") {
			err := &git.ErrPushRejected{
				StdOut: mergeCtx.outbuf.String(),
				StdErr: mergeCtx.errbuf.String(),
				Err:    err,
			}
			err.GenerateMessage()
			return err
		}
		return fmt.Errorf("git push: %s", mergeCtx.errbuf.String())
	}
	mergeCtx.outbuf.Reset()
	mergeCtx.errbuf.Reset()

	return nil
}