author    Daniel Baumann <daniel@debian.org>  2024-10-18 20:33:49 +0200
committer Daniel Baumann <daniel@debian.org>  2024-10-18 20:33:49 +0200
commit    dd136858f1ea40ad3c94191d647487fa4f31926c (patch)
tree      58fec94a7b2a12510c9664b21793f1ed560c6518 /routers/web/repo
parent    Initial commit. (diff)
Adding upstream version 9.0.0.
Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat (limited to 'routers/web/repo')
-rw-r--r--  routers/web/repo/actions/actions.go | 247
-rw-r--r--  routers/web/repo/actions/manual.go | 62
-rw-r--r--  routers/web/repo/actions/view.go | 781
-rw-r--r--  routers/web/repo/activity.go | 105
-rw-r--r--  routers/web/repo/attachment.go | 163
-rw-r--r--  routers/web/repo/badges/badges.go | 164
-rw-r--r--  routers/web/repo/blame.go | 298
-rw-r--r--  routers/web/repo/branch.go | 262
-rw-r--r--  routers/web/repo/cherry_pick.go | 192
-rw-r--r--  routers/web/repo/code_frequency.go | 41
-rw-r--r--  routers/web/repo/commit.go | 468
-rw-r--r--  routers/web/repo/compare.go | 972
-rw-r--r--  routers/web/repo/contributors.go | 38
-rw-r--r--  routers/web/repo/download.go | 170
-rw-r--r--  routers/web/repo/editor.go | 962
-rw-r--r--  routers/web/repo/editor_test.go | 73
-rw-r--r--  routers/web/repo/find.go | 24
-rw-r--r--  routers/web/repo/flags/manage.go | 49
-rw-r--r--  routers/web/repo/githttp.go | 599
-rw-r--r--  routers/web/repo/githttp_test.go | 42
-rw-r--r--  routers/web/repo/helper.go | 44
-rw-r--r--  routers/web/repo/helper_test.go | 26
-rw-r--r--  routers/web/repo/issue.go | 3822
-rw-r--r--  routers/web/repo/issue_content_history.go | 237
-rw-r--r--  routers/web/repo/issue_dependency.go | 144
-rw-r--r--  routers/web/repo/issue_label.go | 229
-rw-r--r--  routers/web/repo/issue_label_test.go | 173
-rw-r--r--  routers/web/repo/issue_lock.go | 65
-rw-r--r--  routers/web/repo/issue_pin.go | 107
-rw-r--r--  routers/web/repo/issue_stopwatch.go | 113
-rw-r--r--  routers/web/repo/issue_test.go | 375
-rw-r--r--  routers/web/repo/issue_timetrack.go | 87
-rw-r--r--  routers/web/repo/issue_watch.go | 63
-rw-r--r--  routers/web/repo/main_test.go | 14
-rw-r--r--  routers/web/repo/middlewares.go | 120
-rw-r--r--  routers/web/repo/migrate.go | 310
-rw-r--r--  routers/web/repo/milestone.go | 304
-rw-r--r--  routers/web/repo/packages.go | 78
-rw-r--r--  routers/web/repo/patch.go | 124
-rw-r--r--  routers/web/repo/projects.go | 670
-rw-r--r--  routers/web/repo/projects_test.go | 27
-rw-r--r--  routers/web/repo/pull.go | 1838
-rw-r--r--  routers/web/repo/pull_review.go | 316
-rw-r--r--  routers/web/repo/pull_review_test.go | 104
-rw-r--r--  routers/web/repo/recent_commits.go | 41
-rw-r--r--  routers/web/repo/release.go | 857
-rw-r--r--  routers/web/repo/release_test.go | 124
-rw-r--r--  routers/web/repo/render.go | 76
-rw-r--r--  routers/web/repo/repo.go | 774
-rw-r--r--  routers/web/repo/search.go | 105
-rw-r--r--  routers/web/repo/setting/avatar.go | 76
-rw-r--r--  routers/web/repo/setting/collaboration.go | 217
-rw-r--r--  routers/web/repo/setting/default_branch.go | 54
-rw-r--r--  routers/web/repo/setting/deploy_key.go | 109
-rw-r--r--  routers/web/repo/setting/git_hooks.go | 65
-rw-r--r--  routers/web/repo/setting/lfs.go | 562
-rw-r--r--  routers/web/repo/setting/main_test.go | 14
-rw-r--r--  routers/web/repo/setting/protected_branch.go | 347
-rw-r--r--  routers/web/repo/setting/protected_tag.go | 188
-rw-r--r--  routers/web/repo/setting/runners.go | 187
-rw-r--r--  routers/web/repo/setting/secrets.go | 127
-rw-r--r--  routers/web/repo/setting/setting.go | 1115
-rw-r--r--  routers/web/repo/setting/settings_test.go | 412
-rw-r--r--  routers/web/repo/setting/variables.go | 140
-rw-r--r--  routers/web/repo/setting/webhook.go | 485
-rw-r--r--  routers/web/repo/topic.go | 60
-rw-r--r--  routers/web/repo/treelist.go | 54
-rw-r--r--  routers/web/repo/view.go | 1258
-rw-r--r--  routers/web/repo/view_test.go | 62
-rw-r--r--  routers/web/repo/wiki.go | 816
-rw-r--r--  routers/web/repo/wiki_test.go | 224
71 files changed, 23651 insertions, 0 deletions
diff --git a/routers/web/repo/actions/actions.go b/routers/web/repo/actions/actions.go
new file mode 100644
index 0000000..ff3b161
--- /dev/null
+++ b/routers/web/repo/actions/actions.go
@@ -0,0 +1,247 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "slices"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/model"
+)
+
+const (
+ tplListActions base.TplName = "repo/actions/list"
+ tplViewActions base.TplName = "repo/actions/view"
+)
+
+type Workflow struct {
+ Entry git.TreeEntry
+ ErrMsg string
+}
+
+// MustEnableActions checks if actions are enabled in settings
+func MustEnableActions(ctx *context.Context) {
+ if !setting.Actions.Enabled {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+
+ if unit.TypeActions.UnitGlobalDisabled() {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+
+ if ctx.Repo.Repository != nil {
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+ }
+}
+
+func List(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageIsActions"] = true
+
+ curWorkflow := ctx.FormString("workflow")
+ ctx.Data["CurWorkflow"] = curWorkflow
+
+ var workflows []Workflow
+ if empty, err := ctx.Repo.GitRepo.IsEmpty(); err != nil {
+ ctx.ServerError("IsEmpty", err)
+ return
+ } else if !empty {
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("GetBranchCommit", err)
+ return
+ }
+ entries, err := actions.ListWorkflows(commit)
+ if err != nil {
+ ctx.ServerError("ListWorkflows", err)
+ return
+ }
+
+ // Get all runner labels
+ runners, err := db.Find[actions_model.ActionRunner](ctx, actions_model.FindRunnerOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsOnline: optional.Some(true),
+ WithAvailable: true,
+ })
+ if err != nil {
+ ctx.ServerError("FindRunners", err)
+ return
+ }
+ allRunnerLabels := make(container.Set[string])
+ for _, r := range runners {
+ allRunnerLabels.AddMultiple(r.AgentLabels...)
+ }
+
+ canRun := ctx.Repo.CanWrite(unit.TypeActions)
+
+ workflows = make([]Workflow, 0, len(entries))
+ for _, entry := range entries {
+ workflow := Workflow{Entry: *entry}
+ content, err := actions.GetContentFromEntry(entry)
+ if err != nil {
+ ctx.ServerError("GetContentFromEntry", err)
+ return
+ }
+ wf, err := model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.invalid_workflow_helper", err.Error())
+ workflows = append(workflows, workflow)
+ continue
+ }
+ // The workflow must contain at least one job without "needs". Otherwise, a deadlock will occur and no jobs will be able to run.
+ hasJobWithoutNeeds := false
+ // Check whether there is a matching runner and a job without "needs"
+ emptyJobsNumber := 0
+ for _, j := range wf.Jobs {
+ if j == nil {
+ emptyJobsNumber++
+ continue
+ }
+ if !hasJobWithoutNeeds && len(j.Needs()) == 0 {
+ hasJobWithoutNeeds = true
+ }
+ runsOnList := j.RunsOn()
+ for _, ro := range runsOnList {
+ if strings.Contains(ro, "${{") {
+ // Skip if it contains expressions.
+ // The expressions could be very complex and could not be evaluated here,
+ // so just skip it, it's OK since it's just a tooltip message.
+ continue
+ }
+ if !allRunnerLabels.Contains(ro) {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_matching_online_runner_helper", ro)
+ break
+ }
+ }
+ if workflow.ErrMsg != "" {
+ break
+ }
+ }
+ if !hasJobWithoutNeeds {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_job_without_needs")
+ }
+ if emptyJobsNumber == len(wf.Jobs) {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_job")
+ }
+ workflows = append(workflows, workflow)
+
+ if canRun && workflow.Entry.Name() == curWorkflow {
+ config := wf.WorkflowDispatchConfig()
+ if config != nil {
+ keys := util.KeysOfMap(config.Inputs)
+ slices.Sort(keys)
+ if int64(len(config.Inputs)) > setting.Actions.LimitDispatchInputs {
+ keys = keys[:setting.Actions.LimitDispatchInputs]
+ }
+
+ ctx.Data["CurWorkflowDispatch"] = config
+ ctx.Data["CurWorkflowDispatchInputKeys"] = keys
+ ctx.Data["WarnDispatchInputsLimit"] = int64(len(config.Inputs)) > setting.Actions.LimitDispatchInputs
+ ctx.Data["DispatchInputsLimit"] = setting.Actions.LimitDispatchInputs
+ }
+ }
+ }
+ }
+ ctx.Data["workflows"] = workflows
+ ctx.Data["RepoLink"] = ctx.Repo.Repository.Link()
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ actorID := ctx.FormInt64("actor")
+ status := ctx.FormInt("status")
+
+ actionsConfig := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions).ActionsConfig()
+ ctx.Data["ActionsConfig"] = actionsConfig
+
+ if len(curWorkflow) > 0 && ctx.Repo.IsAdmin() {
+ ctx.Data["AllowDisableOrEnableWorkflow"] = true
+ ctx.Data["CurWorkflowDisabled"] = actionsConfig.IsWorkflowDisabled(curWorkflow)
+ }
+
+ // if the status or actor query parameter is not set in the frontend href (href="/<repoLink>/actions"),
+ // it defaults to 0, which means all statuses or all actors
+ ctx.Data["CurActor"] = actorID
+ ctx.Data["CurStatus"] = status
+ if actorID > 0 || status > int(actions_model.StatusUnknown) {
+ ctx.Data["IsFiltered"] = true
+ }
+
+ opts := actions_model.FindRunOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ RepoID: ctx.Repo.Repository.ID,
+ WorkflowID: curWorkflow,
+ TriggerUserID: actorID,
+ }
+
+ // if status is not StatusUnknown, it means the user has selected a status filter
+ if actions_model.Status(status) != actions_model.StatusUnknown {
+ opts.Status = []actions_model.Status{actions_model.Status(status)}
+ }
+
+ runs, total, err := db.FindAndCount[actions_model.ActionRun](ctx, opts)
+ if err != nil {
+ ctx.ServerError("FindAndCount", err)
+ return
+ }
+
+ for _, run := range runs {
+ run.Repo = ctx.Repo.Repository
+ }
+
+ if err := actions_model.RunList(runs).LoadTriggerUser(ctx); err != nil {
+ ctx.ServerError("LoadTriggerUser", err)
+ return
+ }
+
+ ctx.Data["Runs"] = runs
+
+ ctx.Data["Repo"] = ctx.Repo
+
+ actors, err := actions_model.GetActors(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetActors", err)
+ return
+ }
+ ctx.Data["Actors"] = repo.MakeSelfOnTop(ctx.Doer, actors)
+
+ ctx.Data["StatusInfoList"] = actions_model.GetStatusInfoList(ctx)
+
+ pager := context.NewPagination(int(total), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParamString("workflow", curWorkflow)
+ pager.AddParamString("actor", fmt.Sprint(actorID))
+ pager.AddParamString("status", fmt.Sprint(status))
+ ctx.Data["Page"] = pager
+ ctx.Data["HasWorkflowsOrRuns"] = len(workflows) > 0 || len(runs) > 0
+
+ ctx.HTML(http.StatusOK, tplListActions)
+}
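The dispatch handling in List above sorts the workflow_dispatch input keys and cuts them off at the configured limit before handing them to the template. A minimal standalone sketch of just that truncation, with the limit value and input names made up for illustration:

    package main

    import (
    	"fmt"
    	"slices"
    )

    // limitDispatchInputs mirrors the truncation done in List: keys are sorted
    // for a stable order, then cut off at the configured limit.
    func limitDispatchInputs(inputs map[string]string, limit int) (keys []string, truncated bool) {
    	for k := range inputs {
    		keys = append(keys, k)
    	}
    	slices.Sort(keys)
    	if len(keys) > limit {
    		return keys[:limit], true
    	}
    	return keys, false
    }

    func main() {
    	// Example inputs; the real values come from the workflow's dispatch config.
    	inputs := map[string]string{"version": "", "environment": "", "dry-run": ""}
    	keys, truncated := limitDispatchInputs(inputs, 2)
    	fmt.Println(keys, truncated) // [dry-run environment] true
    }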
diff --git a/routers/web/repo/actions/manual.go b/routers/web/repo/actions/manual.go
new file mode 100644
index 0000000..86a6014
--- /dev/null
+++ b/routers/web/repo/actions/manual.go
@@ -0,0 +1,62 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "net/url"
+
+ actions_service "code.gitea.io/gitea/services/actions"
+ context_module "code.gitea.io/gitea/services/context"
+)
+
+func ManualRunWorkflow(ctx *context_module.Context) {
+ workflowID := ctx.FormString("workflow")
+ if len(workflowID) == 0 {
+ ctx.ServerError("workflow", nil)
+ return
+ }
+
+ ref := ctx.FormString("ref")
+ if len(ref) == 0 {
+ ctx.ServerError("ref", nil)
+ return
+ }
+
+ if empty, err := ctx.Repo.GitRepo.IsEmpty(); err != nil {
+ ctx.ServerError("IsEmpty", err)
+ return
+ } else if empty {
+ ctx.NotFound("IsEmpty", nil)
+ return
+ }
+
+ workflow, err := actions_service.GetWorkflowFromCommit(ctx.Repo.GitRepo, ref, workflowID)
+ if err != nil {
+ ctx.ServerError("GetWorkflowFromCommit", err)
+ return
+ }
+
+ location := ctx.Repo.RepoLink + "/actions?workflow=" + url.QueryEscape(workflowID) +
+ "&actor=" + url.QueryEscape(ctx.FormString("actor")) +
+ "&status=" + url.QueryEscape(ctx.FormString("status"))
+
+ formKeyGetter := func(key string) string {
+ formKey := "inputs[" + key + "]"
+ return ctx.FormString(formKey)
+ }
+
+ if err := workflow.Dispatch(ctx, formKeyGetter, ctx.Repo.Repository, ctx.Doer); err != nil {
+ if actions_service.IsInputRequiredErr(err) {
+ ctx.Flash.Error(ctx.Locale.Tr("actions.workflow.dispatch.input_required", err.(actions_service.InputRequiredErr).Name))
+ ctx.Redirect(location)
+ return
+ }
+ ctx.ServerError("workflow.Dispatch", err)
+ return
+ }
+
+ // redirect back to the actions list, filtered by the workflow that was just dispatched
+ ctx.Flash.Info(ctx.Locale.Tr("actions.workflow.dispatch.success"))
+ ctx.Redirect(location)
+}
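ManualRunWorkflow builds its redirect location by concatenating query-escaped parameters by hand. The same URL can be assembled with net/url.Values, which handles the escaping and ordering; a small sketch (the repo link and parameter values are assumed examples):

    package main

    import (
    	"fmt"
    	"net/url"
    )

    func main() {
    	repoLink := "/owner/repo" // assumed example value for ctx.Repo.RepoLink
    	q := url.Values{}
    	q.Set("workflow", "build.yml")
    	q.Set("actor", "42")
    	q.Set("status", "1")
    	location := repoLink + "/actions?" + q.Encode()
    	fmt.Println(location) // /owner/repo/actions?actor=42&status=1&workflow=build.yml
    }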
diff --git a/routers/web/repo/actions/view.go b/routers/web/repo/actions/view.go
new file mode 100644
index 0000000..bc1ecbf
--- /dev/null
+++ b/routers/web/repo/actions/view.go
@@ -0,0 +1,781 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "archive/zip"
+ "compress/gzip"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/common"
+ actions_service "code.gitea.io/gitea/services/actions"
+ context_module "code.gitea.io/gitea/services/context"
+
+ "xorm.io/builder"
+)
+
+func View(ctx *context_module.Context) {
+ ctx.Data["PageIsActions"] = true
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ job, _ := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+
+ workflowName := job.Run.WorkflowID
+
+ ctx.Data["RunIndex"] = runIndex
+ ctx.Data["JobIndex"] = jobIndex
+ ctx.Data["ActionsURL"] = ctx.Repo.RepoLink + "/actions"
+ ctx.Data["WorkflowName"] = workflowName
+ ctx.Data["WorkflowURL"] = ctx.Repo.RepoLink + "/actions?workflow=" + workflowName
+
+ ctx.HTML(http.StatusOK, tplViewActions)
+}
+
+func ViewLatest(ctx *context_module.Context) {
+ run, err := actions_model.GetLatestRun(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.NotFound("GetLatestRun", err)
+ return
+ }
+ err = run.LoadAttributes(ctx)
+ if err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Redirect(run.HTMLURL(), http.StatusTemporaryRedirect)
+}
+
+func ViewLatestWorkflowRun(ctx *context_module.Context) {
+ branch := ctx.FormString("branch")
+ if branch == "" {
+ branch = ctx.Repo.Repository.DefaultBranch
+ }
+ branch = fmt.Sprintf("refs/heads/%s", branch)
+ event := ctx.FormString("event")
+
+ workflowFile := ctx.Params("workflow_name")
+ run, err := actions_model.GetLatestRunForBranchAndWorkflow(ctx, ctx.Repo.Repository.ID, branch, workflowFile, event)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.NotFound("GetLatestRunForBranchAndWorkflow", err)
+ } else {
+ ctx.ServerError("GetLatestRunForBranchAndWorkflow", err)
+ }
+ return
+ }
+
+ err = run.LoadAttributes(ctx)
+ if err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Redirect(run.HTMLURL(), http.StatusTemporaryRedirect)
+}
+
+type ViewRequest struct {
+ LogCursors []struct {
+ Step int `json:"step"`
+ Cursor int64 `json:"cursor"`
+ Expanded bool `json:"expanded"`
+ } `json:"logCursors"`
+}
+
+type ViewResponse struct {
+ State struct {
+ Run struct {
+ Link string `json:"link"`
+ Title string `json:"title"`
+ Status string `json:"status"`
+ CanCancel bool `json:"canCancel"`
+ CanApprove bool `json:"canApprove"` // the run needs an approval and the doer has permission to approve
+ CanRerun bool `json:"canRerun"`
+ CanDeleteArtifact bool `json:"canDeleteArtifact"`
+ Done bool `json:"done"`
+ Jobs []*ViewJob `json:"jobs"`
+ Commit ViewCommit `json:"commit"`
+ } `json:"run"`
+ CurrentJob struct {
+ Title string `json:"title"`
+ Detail string `json:"detail"`
+ Steps []*ViewJobStep `json:"steps"`
+ } `json:"currentJob"`
+ } `json:"state"`
+ Logs struct {
+ StepsLog []*ViewStepLog `json:"stepsLog"`
+ } `json:"logs"`
+}
+
+type ViewJob struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+ CanRerun bool `json:"canRerun"`
+ Duration string `json:"duration"`
+}
+
+type ViewCommit struct {
+ LocaleCommit string `json:"localeCommit"`
+ LocalePushedBy string `json:"localePushedBy"`
+ LocaleWorkflow string `json:"localeWorkflow"`
+ ShortSha string `json:"shortSHA"`
+ Link string `json:"link"`
+ Pusher ViewUser `json:"pusher"`
+ Branch ViewBranch `json:"branch"`
+}
+
+type ViewUser struct {
+ DisplayName string `json:"displayName"`
+ Link string `json:"link"`
+}
+
+type ViewBranch struct {
+ Name string `json:"name"`
+ Link string `json:"link"`
+}
+
+type ViewJobStep struct {
+ Summary string `json:"summary"`
+ Duration string `json:"duration"`
+ Status string `json:"status"`
+}
+
+type ViewStepLog struct {
+ Step int `json:"step"`
+ Cursor int64 `json:"cursor"`
+ Lines []*ViewStepLogLine `json:"lines"`
+ Started int64 `json:"started"`
+}
+
+type ViewStepLogLine struct {
+ Index int64 `json:"index"`
+ Message string `json:"message"`
+ Timestamp float64 `json:"timestamp"`
+}
+
+func ViewPost(ctx *context_module.Context) {
+ req := web.GetForm(ctx).(*ViewRequest)
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ current, jobs := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+ run := current.Run
+ if err := run.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ resp := &ViewResponse{}
+
+ resp.State.Run.Title = run.Title
+ resp.State.Run.Link = run.Link()
+ resp.State.Run.CanCancel = !run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanApprove = run.NeedApproval && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanRerun = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanDeleteArtifact = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.Done = run.Status.IsDone()
+ resp.State.Run.Jobs = make([]*ViewJob, 0, len(jobs)) // marshal to '[]' instead of 'null' in json
+ resp.State.Run.Status = run.Status.String()
+ for _, v := range jobs {
+ resp.State.Run.Jobs = append(resp.State.Run.Jobs, &ViewJob{
+ ID: v.ID,
+ Name: v.Name,
+ Status: v.Status.String(),
+ CanRerun: v.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions),
+ Duration: v.Duration().String(),
+ })
+ }
+
+ pusher := ViewUser{
+ DisplayName: run.TriggerUser.GetDisplayName(),
+ Link: run.TriggerUser.HomeLink(),
+ }
+ branch := ViewBranch{
+ Name: run.PrettyRef(),
+ Link: run.RefLink(),
+ }
+ resp.State.Run.Commit = ViewCommit{
+ LocaleCommit: ctx.Locale.TrString("actions.runs.commit"),
+ LocalePushedBy: ctx.Locale.TrString("actions.runs.pushed_by"),
+ LocaleWorkflow: ctx.Locale.TrString("actions.runs.workflow"),
+ ShortSha: base.ShortSha(run.CommitSHA),
+ Link: fmt.Sprintf("%s/commit/%s", run.Repo.Link(), run.CommitSHA),
+ Pusher: pusher,
+ Branch: branch,
+ }
+
+ var task *actions_model.ActionTask
+ if current.TaskID > 0 {
+ var err error
+ task, err = actions_model.GetTaskByID(ctx, current.TaskID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ task.Job = current
+ if err := task.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ resp.State.CurrentJob.Title = current.Name
+ resp.State.CurrentJob.Detail = current.Status.LocaleString(ctx.Locale)
+ if run.NeedApproval {
+ resp.State.CurrentJob.Detail = ctx.Locale.TrString("actions.need_approval_desc")
+ }
+ resp.State.CurrentJob.Steps = make([]*ViewJobStep, 0) // marshal to '[]' instead of 'null' in json
+ resp.Logs.StepsLog = make([]*ViewStepLog, 0) // marshal to '[]' instead of 'null' in json
+ if task != nil {
+ steps := actions.FullSteps(task)
+
+ for _, v := range steps {
+ resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &ViewJobStep{
+ Summary: v.Name,
+ Duration: v.Duration().String(),
+ Status: v.Status.String(),
+ })
+ }
+
+ for _, cursor := range req.LogCursors {
+ if !cursor.Expanded {
+ continue
+ }
+
+ step := steps[cursor.Step]
+
+ // if task log is expired, return a consistent log line
+ if task.LogExpired {
+ if cursor.Cursor == 0 {
+ resp.Logs.StepsLog = append(resp.Logs.StepsLog, &ViewStepLog{
+ Step: cursor.Step,
+ Cursor: 1,
+ Lines: []*ViewStepLogLine{
+ {
+ Index: 1,
+ Message: ctx.Locale.TrString("actions.runs.expire_log_message"),
+ // Timestamp doesn't mean anything when the log is expired.
+ // Set it to the task's updated time since it's probably the time when the log has expired.
+ Timestamp: float64(task.Updated.AsTime().UnixNano()) / float64(time.Second),
+ },
+ },
+ Started: int64(step.Started),
+ })
+ }
+ continue
+ }
+
+ logLines := make([]*ViewStepLogLine, 0) // marshal to '[]' instead of 'null' in json
+
+ index := step.LogIndex + cursor.Cursor
+ validCursor := cursor.Cursor >= 0 &&
+ // !(cursor.Cursor < step.LogLength) when the frontend tries to fetch next line before it's ready.
+ // So return the same cursor and empty lines to let the frontend retry.
+ cursor.Cursor < step.LogLength &&
+ // !(index < task.LogIndexes[index]) when task data is older than step data.
+ // It can be fixed by making sure write/read tasks and steps in the same transaction,
+ // but it's easier to just treat it as fetching the next line before it's ready.
+ index < int64(len(task.LogIndexes))
+
+ if validCursor {
+ length := step.LogLength - cursor.Cursor
+ offset := task.LogIndexes[index]
+ var err error
+ logRows, err := actions.ReadLogs(ctx, task.LogInStorage, task.LogFilename, offset, length)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ for i, row := range logRows {
+ logLines = append(logLines, &ViewStepLogLine{
+ Index: cursor.Cursor + int64(i) + 1, // start at 1
+ Message: row.Content,
+ Timestamp: float64(row.Time.AsTime().UnixNano()) / float64(time.Second),
+ })
+ }
+ }
+
+ resp.Logs.StepsLog = append(resp.Logs.StepsLog, &ViewStepLog{
+ Step: cursor.Step,
+ Cursor: cursor.Cursor + int64(len(logLines)),
+ Lines: logLines,
+ Started: int64(step.Started),
+ })
+ }
+ }
+
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// Rerun will rerun jobs in the given run
+// If jobIndexStr is a blank string, it means rerun all jobs
+func Rerun(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ jobIndexStr := ctx.Params("job")
+ var jobIndex int64
+ if jobIndexStr != "" {
+ jobIndex, _ = strconv.ParseInt(jobIndexStr, 10, 64)
+ }
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ // cannot rerun a job when the workflow is disabled
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+ if cfg.IsWorkflowDisabled(run.WorkflowID) {
+ ctx.JSONError(ctx.Locale.Tr("actions.workflow.disabled"))
+ return
+ }
+
+ // reset run's start and stop time when it is done
+ if run.Status.IsDone() {
+ run.PreviousDuration = run.Duration()
+ run.Started = 0
+ run.Stopped = 0
+ if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "previous_duration"); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ job, jobs := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+
+ if jobIndexStr == "" { // rerun all jobs
+ for _, j := range jobs {
+ // if the job has needs, it should be set to "blocked" status to wait for other jobs
+ shouldBlock := len(j.Needs) > 0
+ if err := rerunJob(ctx, j, shouldBlock); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+ ctx.JSON(http.StatusOK, struct{}{})
+ return
+ }
+
+ rerunJobs := actions_service.GetAllRerunJobs(job, jobs)
+
+ for _, j := range rerunJobs {
+ // jobs other than the specified one should be set to "blocked" status
+ shouldBlock := j.JobID != job.JobID
+ if err := rerunJob(ctx, j, shouldBlock); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func rerunJob(ctx *context_module.Context, job *actions_model.ActionRunJob, shouldBlock bool) error {
+ status := job.Status
+ if !status.IsDone() {
+ return nil
+ }
+
+ job.TaskID = 0
+ job.Status = actions_model.StatusWaiting
+ if shouldBlock {
+ job.Status = actions_model.StatusBlocked
+ }
+ job.Started = 0
+ job.Stopped = 0
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ _, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": status}, "task_id", "status", "started", "stopped")
+ return err
+ }); err != nil {
+ return err
+ }
+
+ actions_service.CreateCommitStatus(ctx, job)
+ return nil
+}
+
+func Logs(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ job, _ := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+ if job.TaskID == 0 {
+ ctx.Error(http.StatusNotFound, "job is not started")
+ return
+ }
+
+ err := job.LoadRun(ctx)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ task, err := actions_model.GetTaskByID(ctx, job.TaskID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if task.LogExpired {
+ ctx.Error(http.StatusNotFound, "logs have been cleaned up")
+ return
+ }
+
+ reader, err := actions.OpenLogs(ctx, task.LogInStorage, task.LogFilename)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ defer reader.Close()
+
+ workflowName := job.Run.WorkflowID
+ if p := strings.Index(workflowName, "."); p > 0 {
+ workflowName = workflowName[0:p]
+ }
+ ctx.ServeContent(reader, &context_module.ServeHeaderOptions{
+ Filename: fmt.Sprintf("%v-%v-%v.log", workflowName, job.Name, task.ID),
+ ContentLength: &task.LogSize,
+ ContentType: "text/plain",
+ ContentTypeCharset: "utf-8",
+ Disposition: "attachment",
+ })
+}
+
+func Cancel(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+
+ _, jobs := getRunJobs(ctx, runIndex, -1)
+ if ctx.Written() {
+ return
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ for _, job := range jobs {
+ status := job.Status
+ if status.IsDone() {
+ continue
+ }
+ if job.TaskID == 0 {
+ job.Status = actions_model.StatusCancelled
+ job.Stopped = timeutil.TimeStampNow()
+ n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
+ if err != nil {
+ return err
+ }
+ if n == 0 {
+ return fmt.Errorf("job has changed, try again")
+ }
+ continue
+ }
+ if err := actions_model.StopTask(ctx, job.TaskID, actions_model.StatusCancelled); err != nil {
+ return err
+ }
+ }
+ return nil
+ }); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ actions_service.CreateCommitStatus(ctx, jobs...)
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func Approve(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+
+ current, jobs := getRunJobs(ctx, runIndex, -1)
+ if ctx.Written() {
+ return
+ }
+ run := current.Run
+ doer := ctx.Doer
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ run.NeedApproval = false
+ run.ApprovedBy = doer.ID
+ if err := actions_model.UpdateRun(ctx, run, "need_approval", "approved_by"); err != nil {
+ return err
+ }
+ for _, job := range jobs {
+ if len(job.Needs) == 0 && job.Status.IsBlocked() {
+ job.Status = actions_model.StatusWaiting
+ _, err := actions_model.UpdateRunJob(ctx, job, nil, "status")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ actions_service.CreateCommitStatus(ctx, jobs...)
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+// getRunJobs gets the jobs of runIndex, and returns jobs[jobIndex], jobs.
+// Any error will be written to the ctx.
+// It never returns a nil job or an empty jobs slice; if jobIndex is out of range, it is treated as 0.
+func getRunJobs(ctx *context_module.Context, runIndex, jobIndex int64) (*actions_model.ActionRunJob, []*actions_model.ActionRunJob) {
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return nil, nil
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return nil, nil
+ }
+ run.Repo = ctx.Repo.Repository
+
+ jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return nil, nil
+ }
+ if len(jobs) == 0 {
+ ctx.Error(http.StatusNotFound)
+ return nil, nil
+ }
+
+ for _, v := range jobs {
+ v.Run = run
+ }
+
+ if jobIndex >= 0 && jobIndex < int64(len(jobs)) {
+ return jobs[jobIndex], jobs
+ }
+ return jobs[0], jobs
+}
+
+type ArtifactsViewResponse struct {
+ Artifacts []*ArtifactsViewItem `json:"artifacts"`
+}
+
+type ArtifactsViewItem struct {
+ Name string `json:"name"`
+ Size int64 `json:"size"`
+ Status string `json:"status"`
+}
+
+func ArtifactsView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ artifacts, err := actions_model.ListUploadedArtifactsMeta(ctx, run.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ artifactsResponse := ArtifactsViewResponse{
+ Artifacts: make([]*ArtifactsViewItem, 0, len(artifacts)),
+ }
+ for _, art := range artifacts {
+ status := "completed"
+ if art.Status == actions_model.ArtifactStatusExpired {
+ status = "expired"
+ }
+ artifactsResponse.Artifacts = append(artifactsResponse.Artifacts, &ArtifactsViewItem{
+ Name: art.ArtifactName,
+ Size: art.FileSize,
+ Status: status,
+ })
+ }
+ ctx.JSON(http.StatusOK, artifactsResponse)
+}
+
+func ArtifactsDeleteView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ artifactName := ctx.Params("artifact_name")
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ ctx.NotFoundOrServerError("GetRunByIndex", func(err error) bool {
+ return errors.Is(err, util.ErrNotExist)
+ }, err)
+ return
+ }
+ if err = actions_model.SetArtifactNeedDelete(ctx, run.ID, artifactName); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func ArtifactsDownloadView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ artifactName := ctx.Params("artifact_name")
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
+ RunID: run.ID,
+ ArtifactName: artifactName,
+ })
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if len(artifacts) == 0 {
+ ctx.Error(http.StatusNotFound, "artifact not found")
+ return
+ }
+
+ // if any artifact's status is not upload-confirmed, treat it as not found
+ for _, art := range artifacts {
+ if art.Status != int64(actions_model.ArtifactStatusUploadConfirmed) {
+ ctx.Error(http.StatusNotFound, "artifact not found")
+ return
+ }
+ }
+
+ // Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
+ // The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
+ if len(artifacts) == 1 && artifacts[0].ArtifactName+".zip" == artifacts[0].ArtifactPath && artifacts[0].ContentEncoding == "application/zip" {
+ art := artifacts[0]
+ if setting.Actions.ArtifactStorage.MinioConfig.ServeDirect {
+ u, err := storage.ActionsArtifacts.URL(art.StoragePath, art.ArtifactPath)
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+ f, err := storage.ActionsArtifacts.Open(art.StoragePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ common.ServeContentByReadSeeker(ctx.Base, artifactName, util.ToPointer(art.UpdatedUnix.AsTime()), f)
+ return
+ }
+
+ // Artifacts using the v1-v3 backend are stored as multiple individual files per artifact on the backend
+ // Those need to be zipped for download
+ ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(artifactName), artifactName))
+ writer := zip.NewWriter(ctx.Resp)
+ defer writer.Close()
+ for _, art := range artifacts {
+ f, err := storage.ActionsArtifacts.Open(art.StoragePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ var r io.ReadCloser
+ if art.ContentEncoding == "gzip" {
+ r, err = gzip.NewReader(f)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ } else {
+ r = f
+ }
+ defer r.Close()
+
+ w, err := writer.Create(art.ArtifactPath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if _, err := io.Copy(w, r); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+}
+
+func DisableWorkflowFile(ctx *context_module.Context) {
+ disableOrEnableWorkflowFile(ctx, false)
+}
+
+func EnableWorkflowFile(ctx *context_module.Context) {
+ disableOrEnableWorkflowFile(ctx, true)
+}
+
+func disableOrEnableWorkflowFile(ctx *context_module.Context, isEnable bool) {
+ workflow := ctx.FormString("workflow")
+ if len(workflow) == 0 {
+ ctx.ServerError("workflow", nil)
+ return
+ }
+
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+
+ if isEnable {
+ cfg.EnableWorkflow(workflow)
+ } else {
+ cfg.DisableWorkflow(workflow)
+ }
+
+ if err := repo_model.UpdateRepoUnit(ctx, cfgUnit); err != nil {
+ ctx.ServerError("UpdateRepoUnit", err)
+ return
+ }
+
+ if isEnable {
+ ctx.Flash.Success(ctx.Tr("actions.workflow.enable_success", workflow))
+ } else {
+ ctx.Flash.Success(ctx.Tr("actions.workflow.disable_success", workflow))
+ }
+
+ redirectURL := fmt.Sprintf("%s/actions?workflow=%s&actor=%s&status=%s", ctx.Repo.RepoLink, url.QueryEscape(workflow),
+ url.QueryEscape(ctx.FormString("actor")), url.QueryEscape(ctx.FormString("status")))
+ ctx.JSONRedirect(redirectURL)
+}
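For v1-v3 artifacts, ArtifactsDownloadView streams the individually stored chunks into one zip on the fly, decompressing gzip-encoded chunks as it goes. A reduced sketch of that pattern with storage access replaced by in-memory buffers for illustration:

    package main

    import (
    	"archive/zip"
    	"bytes"
    	"compress/gzip"
    	"io"
    	"log"
    	"os"
    )

    // chunk stands in for one stored artifact file; ContentEncoding mirrors the
    // field checked in ArtifactsDownloadView.
    type chunk struct {
    	Path            string
    	ContentEncoding string
    	Data            []byte
    }

    func main() {
    	// Build one gzip-encoded chunk and one plain chunk as stand-in data.
    	var gz bytes.Buffer
    	zw := gzip.NewWriter(&gz)
    	_, _ = zw.Write([]byte("compressed artifact content\n"))
    	_ = zw.Close()

    	chunks := []chunk{
    		{Path: "logs/output.txt", ContentEncoding: "gzip", Data: gz.Bytes()},
    		{Path: "report.json", ContentEncoding: "", Data: []byte("{}\n")},
    	}

    	writer := zip.NewWriter(os.Stdout) // the handler writes to ctx.Resp instead
    	defer writer.Close()

    	for _, c := range chunks {
    		var r io.Reader = bytes.NewReader(c.Data)
    		if c.ContentEncoding == "gzip" {
    			gr, err := gzip.NewReader(bytes.NewReader(c.Data))
    			if err != nil {
    				log.Fatal(err)
    			}
    			defer gr.Close()
    			r = gr
    		}
    		w, err := writer.Create(c.Path) // one zip entry per stored chunk
    		if err != nil {
    			log.Fatal(err)
    		}
    		if _, err := io.Copy(w, r); err != nil {
    			log.Fatal(err)
    		}
    	}
    }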
diff --git a/routers/web/repo/activity.go b/routers/web/repo/activity.go
new file mode 100644
index 0000000..ba776c8
--- /dev/null
+++ b/routers/web/repo/activity.go
@@ -0,0 +1,105 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplActivity base.TplName = "repo/activity"
+)
+
+// Activity renders the page showing the repository's latest changes
+func Activity(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity")
+ ctx.Data["PageIsActivity"] = true
+
+ ctx.Data["PageIsPulse"] = true
+
+ ctx.Data["Period"] = ctx.Params("period")
+
+ timeUntil := time.Now()
+ var timeFrom time.Time
+
+ switch ctx.Data["Period"] {
+ case "daily":
+ timeFrom = timeUntil.Add(-time.Hour * 24)
+ case "halfweekly":
+ timeFrom = timeUntil.Add(-time.Hour * 72)
+ case "weekly":
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ case "monthly":
+ timeFrom = timeUntil.AddDate(0, -1, 0)
+ case "quarterly":
+ timeFrom = timeUntil.AddDate(0, -3, 0)
+ case "semiyearly":
+ timeFrom = timeUntil.AddDate(0, -6, 0)
+ case "yearly":
+ timeFrom = timeUntil.AddDate(-1, 0, 0)
+ default:
+ ctx.Data["Period"] = "weekly"
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ }
+ ctx.Data["DateFrom"] = timeFrom.UTC().Format(time.RFC3339)
+ ctx.Data["DateUntil"] = timeUntil.UTC().Format(time.RFC3339)
+ ctx.Data["PeriodText"] = ctx.Tr("repo.activity.period." + ctx.Data["Period"].(string))
+
+ var err error
+ if ctx.Data["Activity"], err = activities_model.GetActivityStats(ctx, ctx.Repo.Repository, timeFrom,
+ ctx.Repo.CanRead(unit.TypeReleases),
+ ctx.Repo.CanRead(unit.TypeIssues),
+ ctx.Repo.CanRead(unit.TypePullRequests),
+ ctx.Repo.CanRead(unit.TypeCode) && !ctx.Repo.Repository.IsEmpty); err != nil {
+ ctx.ServerError("GetActivityStats", err)
+ return
+ }
+
+ if ctx.PageData["repoActivityTopAuthors"], err = activities_model.GetActivityStatsTopAuthors(ctx, ctx.Repo.Repository, timeFrom, 10); err != nil {
+ ctx.ServerError("GetActivityStatsTopAuthors", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplActivity)
+}
+
+// ActivityAuthors renders JSON with the top commit authors for the given time period over all branches
+func ActivityAuthors(ctx *context.Context) {
+ timeUntil := time.Now()
+ var timeFrom time.Time
+
+ switch ctx.Params("period") {
+ case "daily":
+ timeFrom = timeUntil.Add(-time.Hour * 24)
+ case "halfweekly":
+ timeFrom = timeUntil.Add(-time.Hour * 72)
+ case "weekly":
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ case "monthly":
+ timeFrom = timeUntil.AddDate(0, -1, 0)
+ case "quarterly":
+ timeFrom = timeUntil.AddDate(0, -3, 0)
+ case "semiyearly":
+ timeFrom = timeUntil.AddDate(0, -6, 0)
+ case "yearly":
+ timeFrom = timeUntil.AddDate(-1, 0, 0)
+ default:
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ }
+
+ var err error
+ authors, err := activities_model.GetActivityStatsTopAuthors(ctx, ctx.Repo.Repository, timeFrom, 10)
+ if err != nil {
+ ctx.ServerError("GetActivityStatsTopAuthors", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, authors)
+}
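Both Activity and ActivityAuthors map the period string to a start time with the same switch: hour-based offsets for the short periods, calendar-based offsets for the longer ones, and "weekly" as the fallback. A sketch of that mapping as a shared helper (the function name is illustrative, not part of the upstream code):

    package main

    import (
    	"fmt"
    	"time"
    )

    // periodStart mirrors the switch used in Activity/ActivityAuthors.
    func periodStart(period string, until time.Time) time.Time {
    	switch period {
    	case "daily":
    		return until.Add(-24 * time.Hour)
    	case "halfweekly":
    		return until.Add(-72 * time.Hour)
    	case "weekly":
    		return until.Add(-168 * time.Hour)
    	case "monthly":
    		return until.AddDate(0, -1, 0)
    	case "quarterly":
    		return until.AddDate(0, -3, 0)
    	case "semiyearly":
    		return until.AddDate(0, -6, 0)
    	case "yearly":
    		return until.AddDate(-1, 0, 0)
    	default: // unknown periods fall back to weekly
    		return until.Add(-168 * time.Hour)
    	}
    }

    func main() {
    	now := time.Now()
    	fmt.Println(periodStart("monthly", now).Format(time.RFC3339))
    }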
diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go
new file mode 100644
index 0000000..b42effd
--- /dev/null
+++ b/routers/web/repo/attachment.go
@@ -0,0 +1,163 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "net/http"
+
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/attachment"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// UploadIssueAttachment responds to Issue/PR attachment uploads
+func UploadIssueAttachment(ctx *context.Context) {
+ uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Attachment.AllowedTypes)
+}
+
+// UploadReleaseAttachment responds to uploading release attachments
+func UploadReleaseAttachment(ctx *context.Context) {
+ uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Repository.Release.AllowedTypes)
+}
+
+// uploadAttachment responds to uploading attachments
+func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
+ if !setting.Attachment.Enabled {
+ ctx.Error(http.StatusNotFound, "attachment is not enabled")
+ return
+ }
+
+ file, header, err := ctx.Req.FormFile("file")
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err))
+ return
+ }
+ defer file.Close()
+
+ attach, err := attachment.UploadAttachment(ctx, file, allowedTypes, header.Size, &repo_model.Attachment{
+ Name: header.Filename,
+ UploaderID: ctx.Doer.ID,
+ RepoID: repoID,
+ })
+ if err != nil {
+ if upload.IsErrFileTypeForbidden(err) {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("NewAttachment: %v", err))
+ return
+ }
+
+ log.Trace("New attachment uploaded: %s", attach.UUID)
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": attach.UUID,
+ })
+}
+
+// DeleteAttachment responds to deleting an issue's attachment
+func DeleteAttachment(ctx *context.Context) {
+ file := ctx.FormString("file")
+ attach, err := repo_model.GetAttachmentByUUID(ctx, file)
+ if err != nil {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+ if !ctx.IsSigned || (ctx.Doer.ID != attach.UploaderID) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ err = repo_model.DeleteAttachment(ctx, attach, true)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("DeleteAttachment: %v", err))
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": attach.UUID,
+ })
+}
+
+// ServeAttachment serves the attachment with the given UUID
+func ServeAttachment(ctx *context.Context, uuid string) {
+ attach, err := repo_model.GetAttachmentByUUID(ctx, uuid)
+ if err != nil {
+ if repo_model.IsErrAttachmentNotExist(err) {
+ ctx.Error(http.StatusNotFound)
+ } else {
+ ctx.ServerError("GetAttachmentByUUID", err)
+ }
+ return
+ }
+
+ repository, unitType, err := repo_service.LinkedRepository(ctx, attach)
+ if err != nil {
+ ctx.ServerError("LinkedRepository", err)
+ return
+ }
+
+ if repository == nil { // If not linked
+ if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ } else { // If we have the repository we check access
+ perm, err := access_model.GetUserRepoPermission(ctx, repository, ctx.Doer)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err.Error())
+ return
+ }
+ if !perm.CanRead(unitType) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ if attach.ExternalURL != "" {
+ ctx.Redirect(attach.ExternalURL)
+ return
+ }
+
+ if err := attach.IncreaseDownloadCount(ctx); err != nil {
+ ctx.ServerError("IncreaseDownloadCount", err)
+ return
+ }
+
+ if setting.Attachment.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.Attachments.URL(attach.RelativePath(), attach.Name)
+
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+
+ if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+attach.UUID+`"`) {
+ return
+ }
+
+ // At this point we have matched and have access to the release or issue
+ fr, err := storage.Attachments.Open(attach.RelativePath())
+ if err != nil {
+ ctx.ServerError("Open", err)
+ return
+ }
+ defer fr.Close()
+
+ common.ServeContentByReadSeeker(ctx.Base, attach.Name, util.ToPointer(attach.CreatedUnix.AsTime()), fr)
+}
+
+// GetAttachment serves attachments
+func GetAttachment(ctx *context.Context) {
+ ServeAttachment(ctx, ctx.Params(":uuid"))
+}
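ServeAttachment tries a signed direct-storage URL first (when the storage backend supports it) and only falls back to streaming the file through the application. A reduced sketch of that decision with the storage backend stubbed out (names below are illustrative, not the real storage API):

    package main

    import (
    	"fmt"
    	"net/url"
    )

    // store stands in for the attachment storage; URL returning nil means the
    // backend (e.g. local disk) cannot produce a signed direct link.
    type store struct{ signedBase string }

    func (s store) URL(path, name string) (*url.URL, error) {
    	if s.signedBase == "" {
    		return nil, nil
    	}
    	return url.Parse(s.signedBase + "/" + path + "?filename=" + url.QueryEscape(name))
    }

    func serve(s store, path, name string) string {
    	if u, err := s.URL(path, name); u != nil && err == nil {
    		return "redirect to " + u.String() // object storage serves the bytes directly
    	}
    	return "stream " + path + " through the application" // fallback path
    }

    func main() {
    	fmt.Println(serve(store{signedBase: "https://s3.example.com"}, "a/b/uuid", "report.pdf"))
    	fmt.Println(serve(store{}, "a/b/uuid", "report.pdf"))
    }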
diff --git a/routers/web/repo/badges/badges.go b/routers/web/repo/badges/badges.go
new file mode 100644
index 0000000..a2306d5
--- /dev/null
+++ b/routers/web/repo/badges/badges.go
@@ -0,0 +1,164 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package badges
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/setting"
+ context_module "code.gitea.io/gitea/services/context"
+)
+
+func getBadgeURL(ctx *context_module.Context, label, text, color string) string {
+ sb := &strings.Builder{}
+ _ = setting.Badges.GeneratorURLTemplateTemplate.Execute(sb, map[string]string{
+ "label": url.PathEscape(strings.ReplaceAll(label, "-", "--")),
+ "text": url.PathEscape(strings.ReplaceAll(text, "-", "--")),
+ "color": url.PathEscape(color),
+ })
+
+ badgeURL := sb.String()
+ q := ctx.Req.URL.Query()
+ // Remove any `branch` or `event` query parameters. They're used by the
+ // workflow badge route, and do not need forwarding to the badge generator.
+ delete(q, "branch")
+ delete(q, "event")
+ if len(q) > 0 {
+ return fmt.Sprintf("%s?%s", badgeURL, q.Encode())
+ }
+ return badgeURL
+}
+
+func redirectToBadge(ctx *context_module.Context, label, text, color string) {
+ ctx.Redirect(getBadgeURL(ctx, label, text, color))
+}
+
+func errorBadge(ctx *context_module.Context, label, text string) { //nolint:unparam
+ ctx.Redirect(getBadgeURL(ctx, label, text, "crimson"))
+}
+
+func GetWorkflowBadge(ctx *context_module.Context) {
+ branch := ctx.Req.URL.Query().Get("branch")
+ if branch != "" {
+ branch = fmt.Sprintf("refs/heads/%s", branch)
+ }
+ event := ctx.Req.URL.Query().Get("event")
+
+ workflowFile := ctx.Params("workflow_name")
+ run, err := actions_model.GetLatestRunForBranchAndWorkflow(ctx, ctx.Repo.Repository.ID, branch, workflowFile, event)
+ if err != nil {
+ errorBadge(ctx, workflowFile, "Not found")
+ return
+ }
+
+ var color string
+ switch run.Status {
+ case actions_model.StatusUnknown:
+ color = "lightgrey"
+ case actions_model.StatusWaiting:
+ color = "lightgrey"
+ case actions_model.StatusRunning:
+ color = "gold"
+ case actions_model.StatusSuccess:
+ color = "brightgreen"
+ case actions_model.StatusFailure:
+ color = "crimson"
+ case actions_model.StatusCancelled:
+ color = "orange"
+ case actions_model.StatusSkipped:
+ color = "blue"
+ case actions_model.StatusBlocked:
+ color = "yellow"
+ default:
+ color = "lightgrey"
+ }
+
+ redirectToBadge(ctx, workflowFile, run.Status.String(), color)
+}
+
+func getIssueOrPullBadge(ctx *context_module.Context, label, variant string, num int) {
+ var text string
+ if len(variant) > 0 {
+ text = fmt.Sprintf("%d %s", num, variant)
+ } else {
+ text = fmt.Sprintf("%d", num)
+ }
+ redirectToBadge(ctx, label, text, "blue")
+}
+
+func getIssueBadge(ctx *context_module.Context, variant string, num int) {
+ if !ctx.Repo.CanRead(unit.TypeIssues) &&
+ !ctx.Repo.CanRead(unit.TypeExternalTracker) {
+ errorBadge(ctx, "issues", "Not found")
+ return
+ }
+
+ _, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil {
+ errorBadge(ctx, "issues", "Not found")
+ return
+ }
+
+ getIssueOrPullBadge(ctx, "issues", variant, num)
+}
+
+func getPullBadge(ctx *context_module.Context, variant string, num int) {
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ errorBadge(ctx, "pulls", "Not found")
+ return
+ }
+
+ getIssueOrPullBadge(ctx, "pulls", variant, num)
+}
+
+func GetOpenIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "open", ctx.Repo.Repository.NumOpenIssues)
+}
+
+func GetClosedIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "closed", ctx.Repo.Repository.NumClosedIssues)
+}
+
+func GetTotalIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "", ctx.Repo.Repository.NumIssues)
+}
+
+func GetOpenPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "open", ctx.Repo.Repository.NumOpenPulls)
+}
+
+func GetClosedPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "closed", ctx.Repo.Repository.NumClosedPulls)
+}
+
+func GetTotalPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "", ctx.Repo.Repository.NumPulls)
+}
+
+func GetStarsBadge(ctx *context_module.Context) {
+ redirectToBadge(ctx, "stars", fmt.Sprintf("%d", ctx.Repo.Repository.NumStars), "blue")
+}
+
+func GetLatestReleaseBadge(ctx *context_module.Context) {
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ errorBadge(ctx, "release", "Not found")
+ return
+ }
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ redirectToBadge(ctx, "release", release.TagName, "blue")
+}
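getBadgeURL path-escapes the label and text, doubling dashes first (a shields.io-style convention so literal dashes survive the label-text-color separator), and feeds them into the configured URL template. A standalone sketch with an assumed template string standing in for the configured generator URL:

    package main

    import (
    	"fmt"
    	"net/url"
    	"strings"
    	"text/template"
    )

    func main() {
    	// Assumed example template; the real one comes from the Badges settings.
    	tmpl := template.Must(template.New("badge").Parse(
    		"https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}"))

    	sb := &strings.Builder{}
    	_ = tmpl.Execute(sb, map[string]string{
    		"label": url.PathEscape(strings.ReplaceAll("build-status", "-", "--")),
    		"text":  url.PathEscape(strings.ReplaceAll("success", "-", "--")),
    		"color": url.PathEscape("brightgreen"),
    	})
    	fmt.Println(sb.String())
    	// https://img.shields.io/badge/build--status-success-brightgreen
    }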
diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go
new file mode 100644
index 0000000..eea3d4d
--- /dev/null
+++ b/routers/web/repo/blame.go
@@ -0,0 +1,298 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ gotemplate "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/highlight"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ files_service "code.gitea.io/gitea/services/repository/files"
+)
+
+type blameRow struct {
+ RowNumber int
+ Avatar gotemplate.HTML
+ RepoLink string
+ PartSha string
+ PreviousSha string
+ PreviousShaURL string
+ IsFirstCommit bool
+ CommitURL string
+ CommitMessage string
+ CommitSince gotemplate.HTML
+ Code gotemplate.HTML
+ EscapeStatus *charset.EscapeStatus
+}
+
+// RefBlame renders the blame page
+func RefBlame(ctx *context.Context) {
+ if ctx.Repo.TreePath == "" {
+ ctx.NotFound("No file specified", nil)
+ return
+ }
+
+ paths := make([]string, 0, 5)
+ treeNames := strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ // Get the entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+ blob := entry.Blob()
+
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["IsBlame"] = true
+
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ ctx.Data["Paths"] = paths
+ ctx.Data["TreeNames"] = treeNames
+
+ ctx.Data["FileSize"] = blob.Size()
+ ctx.Data["FileName"] = blob.Name()
+
+ // Do not display a blame view if the size of the file is
+ // larger than what is configured as the maximum.
+ if blob.Size() >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ ctx.HTML(http.StatusOK, tplRepoHome)
+ return
+ }
+
+ ctx.Data["NumLinesSet"] = true
+ ctx.Data["NumLines"], err = blob.GetBlobLineCount()
+ if err != nil {
+ ctx.ServerError("GetBlobLineCount", err)
+ return
+ }
+
+ result, err := performBlame(ctx, ctx.Repo.Commit, ctx.Repo.TreePath, ctx.FormBool("bypass-blame-ignore"))
+ if err != nil {
+ ctx.ServerError("performBlame", err)
+ return
+ }
+
+ ctx.Data["UsesIgnoreRevs"] = result.UsesIgnoreRevs
+ ctx.Data["FaultyIgnoreRevsFile"] = result.FaultyIgnoreRevsFile
+
+ commitNames := processBlameParts(ctx, result.Parts)
+ if ctx.Written() {
+ return
+ }
+
+ renderBlame(ctx, result.Parts, commitNames)
+
+ ctx.HTML(http.StatusOK, tplRepoHome)
+}
+
+type blameResult struct {
+ Parts []*git.BlamePart
+ UsesIgnoreRevs bool
+ FaultyIgnoreRevsFile bool
+}
+
+func performBlame(ctx *context.Context, commit *git.Commit, file string, bypassBlameIgnore bool) (*blameResult, error) {
+ repoPath := ctx.Repo.Repository.RepoPath()
+ objectFormat := ctx.Repo.GetObjectFormat()
+
+ blameReader, err := git.CreateBlameReader(ctx, objectFormat, repoPath, commit, file, bypassBlameIgnore)
+ if err != nil {
+ return nil, err
+ }
+
+ r := &blameResult{}
+ if err := fillBlameResult(blameReader, r); err != nil {
+ _ = blameReader.Close()
+ return nil, err
+ }
+
+ err = blameReader.Close()
+ if err != nil {
+ if len(r.Parts) == 0 && r.UsesIgnoreRevs {
+ // try again without ignored revs
+
+ blameReader, err = git.CreateBlameReader(ctx, objectFormat, repoPath, commit, file, true)
+ if err != nil {
+ return nil, err
+ }
+
+ r := &blameResult{
+ FaultyIgnoreRevsFile: true,
+ }
+ if err := fillBlameResult(blameReader, r); err != nil {
+ _ = blameReader.Close()
+ return nil, err
+ }
+
+ return r, blameReader.Close()
+ }
+ return nil, err
+ }
+ return r, nil
+}
+
+func fillBlameResult(br *git.BlameReader, r *blameResult) error {
+ r.UsesIgnoreRevs = br.UsesIgnoreRevs()
+
+ previousHelper := make(map[string]*git.BlamePart)
+
+ r.Parts = make([]*git.BlamePart, 0, 5)
+ for {
+ blamePart, err := br.NextPart()
+ if err != nil {
+ return fmt.Errorf("BlameReader.NextPart failed: %w", err)
+ }
+ if blamePart == nil {
+ break
+ }
+
+ if prev, ok := previousHelper[blamePart.Sha]; ok {
+ if blamePart.PreviousSha == "" {
+ blamePart.PreviousSha = prev.PreviousSha
+ blamePart.PreviousPath = prev.PreviousPath
+ }
+ } else {
+ previousHelper[blamePart.Sha] = blamePart
+ }
+
+ r.Parts = append(r.Parts, blamePart)
+ }
+
+ return nil
+}
+
+func processBlameParts(ctx *context.Context, blameParts []*git.BlamePart) map[string]*user_model.UserCommit {
+ // store commit data by SHA to look up avatar info etc
+ commitNames := make(map[string]*user_model.UserCommit)
+ // and as blameParts can reference the same commits multiple
+ // times, we cache the lookup work locally
+ commits := make([]*git.Commit, 0, len(blameParts))
+ commitCache := map[string]*git.Commit{}
+ commitCache[ctx.Repo.Commit.ID.String()] = ctx.Repo.Commit
+
+ for _, part := range blameParts {
+ sha := part.Sha
+ if _, ok := commitNames[sha]; ok {
+ continue
+ }
+
+ // find the blamePart commit, to look up parent & email address for avatars
+ commit, ok := commitCache[sha]
+ var err error
+ if !ok {
+ commit, err = ctx.Repo.GitRepo.GetCommit(sha)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Repo.GitRepo.GetCommit", err)
+ } else {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ }
+ return nil
+ }
+ commitCache[sha] = commit
+ }
+
+ commits = append(commits, commit)
+ }
+
+ // populate commit email addresses to later look up avatars.
+ for _, c := range user_model.ValidateCommitsWithEmails(ctx, commits) {
+ commitNames[c.ID.String()] = c
+ }
+
+ return commitNames
+}
+
+func renderBlame(ctx *context.Context, blameParts []*git.BlamePart, commitNames map[string]*user_model.UserCommit) {
+ repoLink := ctx.Repo.RepoLink
+
+ language, err := files_service.TryGetContentLanguage(ctx.Repo.GitRepo, ctx.Repo.CommitID, ctx.Repo.TreePath)
+ if err != nil {
+ log.Error("Unable to get file language for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
+ }
+
+ lines := make([]string, 0)
+ rows := make([]*blameRow, 0)
+ escapeStatus := &charset.EscapeStatus{}
+
+ var lexerName string
+
+ avatarUtils := templates.NewAvatarUtils(ctx)
+ i := 0
+ commitCnt := 0
+ for _, part := range blameParts {
+ for index, line := range part.Lines {
+ i++
+ lines = append(lines, line)
+
+ br := &blameRow{
+ RowNumber: i,
+ }
+
+ commit := commitNames[part.Sha]
+ if index == 0 {
+ // Count commit number
+ commitCnt++
+
+ // User avatar image
+ commitSince := timeutil.TimeSinceUnix(timeutil.TimeStamp(commit.Author.When.Unix()), ctx.Locale)
+
+ var avatar string
+ if commit.User != nil {
+ avatar = string(avatarUtils.Avatar(commit.User, 18))
+ } else {
+ avatar = string(avatarUtils.AvatarByEmail(commit.Author.Email, commit.Author.Name, 18, "tw-mr-2"))
+ }
+
+ br.Avatar = gotemplate.HTML(avatar)
+ br.RepoLink = repoLink
+ br.PartSha = part.Sha
+ br.PreviousSha = part.PreviousSha
+ br.PreviousShaURL = fmt.Sprintf("%s/blame/commit/%s/%s", repoLink, url.PathEscape(part.PreviousSha), util.PathEscapeSegments(part.PreviousPath))
+ br.CommitURL = fmt.Sprintf("%s/commit/%s", repoLink, url.PathEscape(part.Sha))
+ br.CommitMessage = commit.CommitMessage
+ br.CommitSince = commitSince
+ }
+
+ if i != len(lines)-1 {
+ line += "\n"
+ }
+ fileName := fmt.Sprintf("%v", ctx.Data["FileName"])
+ line, lexerNameForLine := highlight.Code(fileName, language, line)
+
+ // set lexer name to the first detected lexer. this is certainly suboptimal and
+ // we should instead highlight the whole file at once
+ if lexerName == "" {
+ lexerName = lexerNameForLine
+ }
+
+ br.EscapeStatus, br.Code = charset.EscapeControlHTML(line, ctx.Locale, charset.FileviewContext)
+ rows = append(rows, br)
+ escapeStatus = escapeStatus.Or(br.EscapeStatus)
+ }
+ }
+
+ ctx.Data["EscapeStatus"] = escapeStatus
+ ctx.Data["BlameRows"] = rows
+ ctx.Data["CommitCnt"] = commitCnt
+ ctx.Data["LexerName"] = lexerName
+}
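
The blame renderer above walks each BlamePart and emits one blameRow per source line, attaching the commit metadata (avatar, message, links) only to the first row of each part. A minimal standalone sketch of that grouping, using simplified Part and Row types that are hypothetical stand-ins for git.BlamePart and blameRow:

package main

import "fmt"

// Part is a simplified stand-in for git.BlamePart: one commit, several lines.
type Part struct {
	Sha   string
	Lines []string
}

// Row mirrors the idea of blameRow: per-line output, with commit info only on
// the first line of each part so the template can span the cell over the group.
type Row struct {
	RowNumber int
	Sha       string // empty except on the first line of a part
	Code      string
}

func rowsFromParts(parts []Part) []Row {
	rows := make([]Row, 0)
	i := 0
	for _, p := range parts {
		for idx, line := range p.Lines {
			i++
			r := Row{RowNumber: i, Code: line}
			if idx == 0 {
				r.Sha = p.Sha // commit metadata attached once per part
			}
			rows = append(rows, r)
		}
	}
	return rows
}

func main() {
	parts := []Part{
		{Sha: "aaaa", Lines: []string{"package main", ""}},
		{Sha: "bbbb", Lines: []string{"func main() {}"}},
	}
	for _, r := range rowsFromParts(parts) {
		fmt.Printf("%3d %-4s %s\n", r.RowNumber, r.Sha, r.Code)
	}
}
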
diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go
new file mode 100644
index 0000000..4897a5f
--- /dev/null
+++ b/routers/web/repo/branch.go
@@ -0,0 +1,262 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ release_service "code.gitea.io/gitea/services/release"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplBranch base.TplName = "repo/branch/list"
+)
+
+// Branches renders the repository branch list page
+func Branches(ctx *context.Context) {
+ ctx.Data["Title"] = "Branches"
+ ctx.Data["IsRepoToolbarBranches"] = true
+ ctx.Data["AllowsPulls"] = ctx.Repo.Repository.AllowsPulls(ctx)
+ ctx.Data["IsWriter"] = ctx.Repo.CanWrite(unit.TypeCode)
+ ctx.Data["IsMirror"] = ctx.Repo.Repository.IsMirror
+ ctx.Data["CanPull"] = ctx.Repo.CanWrite(unit.TypeCode) ||
+ (ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID))
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["PageIsBranches"] = true
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ pageSize := setting.Git.BranchesRangeSize
+
+ kw := ctx.FormString("q")
+
+ defaultBranch, branches, branchesCount, err := repo_service.LoadBranches(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, optional.None[bool](), kw, page, pageSize)
+ if err != nil {
+ ctx.ServerError("LoadBranches", err)
+ return
+ }
+
+ commitIDs := []string{defaultBranch.DBBranch.CommitID}
+ for _, branch := range branches {
+ commitIDs = append(commitIDs, branch.DBBranch.CommitID)
+ }
+
+ commitStatuses, err := git_model.GetLatestCommitStatusForRepoCommitIDs(ctx, ctx.Repo.Repository.ID, commitIDs)
+ if err != nil {
+ ctx.ServerError("LoadBranches", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+
+ commitStatus := make(map[string]*git_model.CommitStatus)
+ for commitID, cs := range commitStatuses {
+ commitStatus[commitID] = git_model.CalcCommitStatus(cs)
+ }
+
+ ctx.Data["Keyword"] = kw
+ ctx.Data["Branches"] = branches
+ ctx.Data["CommitStatus"] = commitStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+ ctx.Data["DefaultBranchBranch"] = defaultBranch
+ pager := context.NewPagination(int(branchesCount), pageSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplBranch)
+}
+
+// DeleteBranchPost handles the request to delete a branch
+func DeleteBranchPost(ctx *context.Context) {
+ defer redirect(ctx)
+ branchName := ctx.FormString("name")
+
+ if err := repo_service.DeleteBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil {
+ switch {
+ case git.IsErrBranchNotExist(err):
+ log.Debug("DeleteBranch: Can't delete non existing branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName))
+ case errors.Is(err, repo_service.ErrBranchIsDefault):
+ log.Debug("DeleteBranch: Can't delete default branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.default_deletion_failed", branchName))
+ case errors.Is(err, git_model.ErrBranchIsProtected):
+ log.Debug("DeleteBranch: Can't delete protected branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.protected_deletion_failed", branchName))
+ default:
+ log.Error("DeleteBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName))
+ }
+
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", branchName))
+}
+
+// RestoreBranchPost handles the request to restore a deleted branch
+func RestoreBranchPost(ctx *context.Context) {
+ defer redirect(ctx)
+
+ branchID := ctx.FormInt64("branch_id")
+ branchName := ctx.FormString("name")
+
+ deletedBranch, err := git_model.GetDeletedBranchByID(ctx, ctx.Repo.Repository.ID, branchID)
+ if err != nil {
+ log.Error("GetDeletedBranchByID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", branchName))
+ return
+ } else if deletedBranch == nil {
+ log.Debug("RestoreBranch: Can't restore branch[%d] '%s', as it does not exist", branchID, branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", branchName))
+ return
+ }
+
+ if err := git.Push(ctx, ctx.Repo.Repository.RepoPath(), git.PushOptions{
+ Remote: ctx.Repo.Repository.RepoPath(),
+ Branch: fmt.Sprintf("%s:%s%s", deletedBranch.CommitID, git.BranchPrefix, deletedBranch.Name),
+ Env: repo_module.PushingEnvironment(ctx.Doer, ctx.Repo.Repository),
+ }); err != nil {
+ if strings.Contains(err.Error(), "already exists") {
+ log.Debug("RestoreBranch: Can't restore branch '%s', since one with same name already exist", deletedBranch.Name)
+ ctx.Flash.Error(ctx.Tr("repo.branch.already_exists", deletedBranch.Name))
+ return
+ }
+ log.Error("RestoreBranch: CreateBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", deletedBranch.Name))
+ return
+ }
+
+ objectFormat := git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName)
+
+ // Don't return an error below this point
+ if err := repo_service.PushUpdate(
+ &repo_module.PushUpdateOptions{
+ RefFullName: git.RefNameFromBranch(deletedBranch.Name),
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: deletedBranch.CommitID,
+ PusherID: ctx.Doer.ID,
+ PusherName: ctx.Doer.Name,
+ RepoUserName: ctx.Repo.Owner.Name,
+ RepoName: ctx.Repo.Repository.Name,
+ }); err != nil {
+ log.Error("RestoreBranch: Update: %v", err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.restore_success", deletedBranch.Name))
+}
+
+func redirect(ctx *context.Context) {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/branches?page=" + url.QueryEscape(ctx.FormString("page")))
+}
+
+// CreateBranch creates a new branch in the repository
+func CreateBranch(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewBranchForm)
+ if !ctx.Repo.CanCreateBranch() {
+ ctx.NotFound("CreateBranch", nil)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.GetErrMsg())
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ var err error
+
+ if form.CreateTag {
+ target := ctx.Repo.CommitID
+ if ctx.Repo.IsViewBranch {
+ target = ctx.Repo.BranchName
+ }
+ err = release_service.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, target, form.NewBranchName, "")
+ } else if ctx.Repo.IsViewBranch {
+ err = repo_service.CreateNewBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.Repo.BranchName, form.NewBranchName)
+ } else {
+ err = repo_service.CreateNewBranchFromCommit(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.Repo.CommitID, form.NewBranchName)
+ }
+ if err != nil {
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrTagAlreadyExists(err) {
+ e := err.(models.ErrTagAlreadyExists)
+ ctx.Flash.Error(ctx.Tr("repo.branch.tag_collision", e.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git_model.IsErrBranchAlreadyExists(err) || git.IsErrPushOutOfDate(err) {
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_already_exists", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git_model.IsErrBranchNameConflict(err) {
+ e := err.(git_model.ErrBranchNameConflict)
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_name_conflict", form.NewBranchName, e.BranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git.IsErrPushRejected(err) {
+ e := err.(*git.ErrPushRejected)
+ if len(e.Message) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.editor.push_rejected_no_message"))
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(e.Message),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ ctx.ServerError("CreateNewBranch", err)
+ return
+ }
+
+ if form.CreateTag {
+ ctx.Flash.Success(ctx.Tr("repo.tag.create_success", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/tag/" + util.PathEscapeSegments(form.NewBranchName))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.create_success", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(form.NewBranchName) + "/" + util.PathEscapeSegments(form.CurrentPath))
+}
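
RestoreBranchPost recreates a deleted branch by pushing the recorded commit ID to refs/heads/<name> in the same repository. A minimal sketch of how that refspec is assembled; the commit ID and branch name below are made up for illustration, and branchPrefix is assumed to match git.BranchPrefix ("refs/heads/") used above:

package main

import "fmt"

const branchPrefix = "refs/heads/" // assumed to match git.BranchPrefix

func main() {
	commitID := "0123456789abcdef0123456789abcdef01234567" // recorded when the branch was deleted
	name := "feature/restore-me"

	// Pushing "<commitID>:refs/heads/<name>" to the repository itself
	// recreates the branch pointing at that commit, which is what
	// git.Push is asked to do in RestoreBranchPost.
	refspec := fmt.Sprintf("%s:%s%s", commitID, branchPrefix, name)
	fmt.Println("refspec:", refspec)
	// Roughly equivalent on the command line (illustrative only):
	//   git push <repo-path> 0123...4567:refs/heads/feature/restore-me
}
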
diff --git a/routers/web/repo/cherry_pick.go b/routers/web/repo/cherry_pick.go
new file mode 100644
index 0000000..90dae70
--- /dev/null
+++ b/routers/web/repo/cherry_pick.go
@@ -0,0 +1,192 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "errors"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/repository/files"
+)
+
+var tplCherryPick base.TplName = "repo/editor/cherry_pick"
+
+// CherryPick handles cherrypick GETs
+func CherryPick(ctx *context.Context) {
+ ctx.Data["SHA"] = ctx.Params(":sha")
+ cherryPickCommit, err := ctx.Repo.GitRepo.GetCommit(ctx.Params(":sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Missing Commit", err)
+ return
+ }
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+
+ if ctx.FormString("cherry-pick-type") == "revert" {
+ ctx.Data["CherryPickType"] = "revert"
+ ctx.Data["commit_summary"] = "revert " + ctx.Params(":sha")
+ ctx.Data["commit_message"] = "revert " + cherryPickCommit.Message()
+ } else {
+ ctx.Data["CherryPickType"] = "cherry-pick"
+ splits := strings.SplitN(cherryPickCommit.Message(), "\n", 2)
+ ctx.Data["commit_summary"] = splits[0]
+ ctx.Data["commit_message"] = splits[1]
+ }
+
+ canCommit := renderCommitRights(ctx)
+ ctx.Data["TreePath"] = ""
+
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ ctx.HTML(200, tplCherryPick)
+}
+
+// CherryPickPost handles cherrypick POSTs
+func CherryPickPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CherryPickForm)
+
+ sha := ctx.Params(":sha")
+ ctx.Data["SHA"] = sha
+ if form.Revert {
+ ctx.Data["CherryPickType"] = "revert"
+ } else {
+ ctx.Data["CherryPickType"] = "cherry-pick"
+ }
+
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ if ctx.HasError() {
+ ctx.HTML(200, tplCherryPick)
+ return
+ }
+
+ // Cannot commit to an existing branch if the user doesn't have rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplCherryPick, &form)
+ return
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if message == "" {
+ if form.Revert {
+ message = ctx.Locale.TrString("repo.commit.revert-header", sha)
+ } else {
+ message = ctx.Locale.TrString("repo.commit.cherry-pick-header", sha)
+ }
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplCherryPick, &form)
+ if ctx.Written() {
+ return
+ }
+
+ opts := &files.ApplyDiffPatchOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }
+
+ // First, let's try the simple read-tree -m approach
+ opts.Content = sha
+ if _, err := files.CherryPick(ctx, ctx.Repo.Repository, ctx.Doer, form.Revert, opts); err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplCherryPick, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ // Drop through to the apply technique
+
+ buf := &bytes.Buffer{}
+ if form.Revert {
+ if err := git.GetReverseRawDiff(ctx, ctx.Repo.Repository.RepoPath(), sha, buf); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff", errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+ } else {
+ if err := git.GetRawDiff(ctx.Repo.GitRepo, sha, git.RawDiffType("patch"), buf); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff", errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+ }
+
+ opts.Content = buf.String()
+ ctx.Data["FileContent"] = opts.Content
+
+ if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, opts); err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplCherryPick, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.editor.fail_to_apply_patch", err), tplPatchFile, &form)
+ return
+ }
+ }
+
+ if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName))
+ }
+}
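
CherryPickPost tries the cheap path first (files.CherryPick, the read-tree -m approach) and only when that fails for a reason other than a branch collision or a stale last-commit ID does it regenerate the change as a raw patch (a reverse patch for a revert) and apply that instead. A rough sketch of that fallback shape, where tryNative and applyPatch are hypothetical stand-ins for the service calls used above:

package main

import (
	"errors"
	"fmt"
)

var errNativeFailed = errors.New("read-tree merge could not apply the commit cleanly")

// tryNative and applyPatch are hypothetical stand-ins for
// files.CherryPick and files.ApplyDiffPatch in the handler above.
func tryNative(sha string, revert bool) error { return errNativeFailed }
func applyPatch(patch string) error           { return nil }

func cherryPick(sha string, revert bool) error {
	if err := tryNative(sha, revert); err == nil {
		return nil // fast path succeeded
	}
	// Fall back: build a patch (reverse patch for a revert) and apply it,
	// mirroring GetRawDiff / GetReverseRawDiff plus ApplyDiffPatch above.
	patch := fmt.Sprintf("<patch generated for %s, revert=%t>", sha, revert)
	return applyPatch(patch)
}

func main() {
	if err := cherryPick("deadbeef", false); err != nil {
		fmt.Println("cherry-pick failed:", err)
		return
	}
	fmt.Println("cherry-pick applied")
}
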
diff --git a/routers/web/repo/code_frequency.go b/routers/web/repo/code_frequency.go
new file mode 100644
index 0000000..c76f492
--- /dev/null
+++ b/routers/web/repo/code_frequency.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplCodeFrequency base.TplName = "repo/activity"
+)
+
+// CodeFrequency renders the page to show repository code frequency
+func CodeFrequency(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.code_frequency")
+
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsCodeFrequency"] = true
+ ctx.PageData["repoLink"] = ctx.Repo.RepoLink
+
+ ctx.HTML(http.StatusOK, tplCodeFrequency)
+}
+
+// CodeFrequencyData returns JSON of code frequency data
+func CodeFrequencyData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("GetCodeFrequencyData", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
+ }
+}
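
CodeFrequencyData answers 202 Accepted while contributor statistics are still being generated and only returns JSON once the data is ready, so a client has to poll. A minimal sketch of such a client; the instance URL and the route path are assumptions for illustration, as the actual route is registered elsewhere in the router:

package main

import (
	"fmt"
	"io"
	"net/http"
	"time"
)

// pollCodeFrequency keeps requesting the data endpoint until the server stops
// answering 202 Accepted (stats still generating) and returns the JSON body.
func pollCodeFrequency(url string) ([]byte, error) {
	for i := 0; i < 30; i++ {
		resp, err := http.Get(url)
		if err != nil {
			return nil, err
		}
		if resp.StatusCode == http.StatusAccepted {
			resp.Body.Close()
			time.Sleep(2 * time.Second) // stats not ready yet, try again
			continue
		}
		defer resp.Body.Close()
		if resp.StatusCode != http.StatusOK {
			return nil, fmt.Errorf("unexpected status %s", resp.Status)
		}
		return io.ReadAll(resp.Body)
	}
	return nil, fmt.Errorf("gave up waiting for code frequency data")
}

func main() {
	// Hypothetical instance and repository, for illustration only.
	body, err := pollCodeFrequency("https://forgejo.example.com/owner/repo/activity/code-frequency/data")
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("received %d bytes of week data\n", len(body))
}
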
diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go
new file mode 100644
index 0000000..0e5d1f0
--- /dev/null
+++ b/routers/web/repo/commit.go
@@ -0,0 +1,468 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "net/http"
+ "path"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitgraph"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/gitdiff"
+ git_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplCommits base.TplName = "repo/commits"
+ tplGraph base.TplName = "repo/graph"
+ tplGraphDiv base.TplName = "repo/graph/div"
+ tplCommitPage base.TplName = "repo/commit_page"
+)
+
+// RefCommits render commits page
+func RefCommits(ctx *context.Context) {
+ switch {
+ case len(ctx.Repo.TreePath) == 0:
+ Commits(ctx)
+ case ctx.Repo.TreePath == "search":
+ SearchCommits(ctx)
+ default:
+ FileHistory(ctx)
+ }
+}
+
+// Commits renders the branch's commits
+func Commits(ctx *context.Context) {
+ ctx.Data["PageIsCommits"] = true
+ if ctx.Repo.Commit == nil {
+ ctx.NotFound("Commit not found", nil)
+ return
+ }
+ ctx.Data["PageIsViewCode"] = true
+
+ commitsCount, err := ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ pageSize := ctx.FormInt("limit")
+ if pageSize <= 0 {
+ pageSize = setting.Git.CommitsRangeSize
+ }
+
+ // Both `git log branchName` and `git log commitId` work.
+ commits, err := ctx.Repo.Commit.CommitsByRange(page, pageSize, "")
+ if err != nil {
+ ctx.ServerError("CommitsByRange", err)
+ return
+ }
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["CommitCount"] = commitsCount
+
+ pager := context.NewPagination(int(commitsCount), pageSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+// Graph renders the commit graph, showing commits from all branches.
+func Graph(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.commit_graph")
+ ctx.Data["PageIsCommits"] = true
+ ctx.Data["PageIsViewCode"] = true
+ mode := strings.ToLower(ctx.FormTrim("mode"))
+ if mode != "monochrome" {
+ mode = "color"
+ }
+ ctx.Data["Mode"] = mode
+ hidePRRefs := ctx.FormBool("hide-pr-refs")
+ ctx.Data["HidePRRefs"] = hidePRRefs
+ branches := ctx.FormStrings("branch")
+ realBranches := make([]string, len(branches))
+ copy(realBranches, branches)
+ for i, branch := range realBranches {
+ if strings.HasPrefix(branch, "--") {
+ realBranches[i] = git.BranchPrefix + branch
+ }
+ }
+ ctx.Data["SelectedBranches"] = realBranches
+ files := ctx.FormStrings("file")
+
+ commitsCount, err := ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+
+ graphCommitsCount, err := ctx.Repo.GetCommitGraphsCount(ctx, hidePRRefs, realBranches, files)
+ if err != nil {
+ log.Warn("GetCommitGraphsCount error for generate graph exclude prs: %t branches: %s in %-v, Will Ignore branches and try again. Underlying Error: %v", hidePRRefs, branches, ctx.Repo.Repository, err)
+ realBranches = []string{}
+ branches = []string{}
+ graphCommitsCount, err = ctx.Repo.GetCommitGraphsCount(ctx, hidePRRefs, realBranches, files)
+ if err != nil {
+ ctx.ServerError("GetCommitGraphsCount", err)
+ return
+ }
+ }
+
+ page := ctx.FormInt("page")
+
+ graph, err := gitgraph.GetCommitGraph(ctx.Repo.GitRepo, page, 0, hidePRRefs, realBranches, files)
+ if err != nil {
+ ctx.ServerError("GetCommitGraph", err)
+ return
+ }
+
+ if err := graph.LoadAndProcessCommits(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo); err != nil {
+ ctx.ServerError("LoadAndProcessCommits", err)
+ return
+ }
+
+ ctx.Data["Graph"] = graph
+
+ gitRefs, err := ctx.Repo.GitRepo.GetRefs()
+ if err != nil {
+ ctx.ServerError("GitRepo.GetRefs", err)
+ return
+ }
+
+ ctx.Data["AllRefs"] = gitRefs
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["CommitCount"] = commitsCount
+
+ paginator := context.NewPagination(int(graphCommitsCount), setting.UI.GraphMaxCommitNum, page, 5)
+ paginator.AddParam(ctx, "mode", "Mode")
+ paginator.AddParam(ctx, "hide-pr-refs", "HidePRRefs")
+ for _, branch := range branches {
+ paginator.AddParamString("branch", branch)
+ }
+ for _, file := range files {
+ paginator.AddParamString("file", file)
+ }
+ ctx.Data["Page"] = paginator
+ if ctx.FormBool("div-only") {
+ ctx.HTML(http.StatusOK, tplGraphDiv)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplGraph)
+}
+
+// SearchCommits renders commits filtered by keyword
+func SearchCommits(ctx *context.Context) {
+ ctx.Data["PageIsCommits"] = true
+ ctx.Data["PageIsViewCode"] = true
+
+ query := ctx.FormTrim("q")
+ if len(query) == 0 {
+ ctx.Redirect(ctx.Repo.RepoLink + "/commits/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ all := ctx.FormBool("all")
+ opts := git.NewSearchCommitsOptions(query, all)
+ commits, err := ctx.Repo.Commit.SearchCommits(opts)
+ if err != nil {
+ ctx.ServerError("SearchCommits", err)
+ return
+ }
+ ctx.Data["CommitCount"] = len(commits)
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Keyword"] = query
+ if all {
+ ctx.Data["All"] = true
+ }
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+// FileHistory shows a file's revision history
+func FileHistory(ctx *context.Context) {
+ fileName := ctx.Repo.TreePath
+ if len(fileName) == 0 {
+ Commits(ctx)
+ return
+ }
+
+ commitsCount, err := ctx.Repo.GitRepo.FileCommitsCount(ctx.Repo.RefName, fileName)
+ if err != nil {
+ ctx.ServerError("FileCommitsCount", err)
+ return
+ } else if commitsCount == 0 {
+ ctx.NotFound("FileCommitsCount", nil)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ commits, err := ctx.Repo.GitRepo.CommitsByFileAndRange(
+ git.CommitsByFileAndRangeOptions{
+ Revision: ctx.Repo.RefName,
+ File: fileName,
+ Page: page,
+ })
+ if err != nil {
+ ctx.ServerError("CommitsByFileAndRange", err)
+ return
+ }
+
+ if len(commits) == 0 {
+ ctx.NotFound("CommitsByFileAndRange", nil)
+ return
+ }
+
+ oldestCommit := commits[len(commits)-1]
+
+ renamedFiles, err := git.GetCommitFileRenames(ctx, ctx.Repo.GitRepo.Path, oldestCommit.ID.String())
+ if err != nil {
+ ctx.ServerError("GetCommitFileRenames", err)
+ return
+ }
+
+ for _, renames := range renamedFiles {
+ if renames[1] == fileName {
+ ctx.Data["OldFilename"] = renames[0]
+ ctx.Data["OldFilenameHistory"] = fmt.Sprintf("%s/commits/commit/%s/%s", ctx.Repo.RepoLink, oldestCommit.ID.String(), renames[0])
+ break
+ }
+ }
+
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["FileName"] = fileName
+ ctx.Data["CommitCount"] = commitsCount
+
+ pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+func LoadBranchesAndTags(ctx *context.Context) {
+ response, err := git_service.LoadBranchesAndTags(ctx, ctx.Repo, ctx.Params("sha"))
+ if err == nil {
+ ctx.JSON(http.StatusOK, response)
+ return
+ }
+ ctx.NotFoundOrServerError(fmt.Sprintf("could not load branches and tags the commit %s belongs to", ctx.Params("sha")), git.IsErrNotExist, err)
+}
+
+// Diff shows the diff from the current commit to the previous commit
+func Diff(ctx *context.Context) {
+ ctx.Data["PageIsDiff"] = true
+
+ userName := ctx.Repo.Owner.Name
+ repoName := ctx.Repo.Repository.Name
+ commitID := ctx.Params(":sha")
+ var (
+ gitRepo *git.Repository
+ err error
+ )
+
+ if ctx.Data["PageIsWiki"] != nil {
+ gitRepo, err = gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ return
+ }
+ defer gitRepo.Close()
+ } else {
+ gitRepo = ctx.Repo.GitRepo
+ }
+
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Repo.GitRepo.GetCommit", err)
+ } else {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ }
+ return
+ }
+ if len(commitID) != commit.ID.Type().FullLength() {
+ commitID = commit.ID.String()
+ }
+
+ fileOnly := ctx.FormBool("file-only")
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
+ if fileOnly && (len(files) == 2 || len(files) == 1) {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
+ AfterCommitID: commitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }, files...)
+ if err != nil {
+ ctx.NotFound("GetDiff", err)
+ return
+ }
+
+ parents := make([]string, commit.ParentCount())
+ for i := 0; i < commit.ParentCount(); i++ {
+ sha, err := commit.ParentID(i)
+ if err != nil {
+ ctx.NotFound("repo.Diff", err)
+ return
+ }
+ parents[i] = sha.String()
+ }
+
+ ctx.Data["CommitID"] = commitID
+ ctx.Data["AfterCommitID"] = commitID
+ ctx.Data["Username"] = userName
+ ctx.Data["Reponame"] = repoName
+
+ var parentCommit *git.Commit
+ if commit.ParentCount() > 0 {
+ parentCommit, err = gitRepo.GetCommit(parents[0])
+ if err != nil {
+ ctx.NotFound("GetParentCommit", err)
+ return
+ }
+ }
+ setCompareContext(ctx, parentCommit, commit, userName, repoName)
+ ctx.Data["Title"] = commit.Summary() + " · " + base.ShortSha(commitID)
+ ctx.Data["Commit"] = commit
+ ctx.Data["Diff"] = diff
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptionsAll)
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ }
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, statuses)
+ }
+
+ ctx.Data["CommitStatus"] = git_model.CalcCommitStatus(statuses)
+ ctx.Data["CommitStatuses"] = statuses
+
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ ctx.Data["Verification"] = verification
+ ctx.Data["Author"] = user_model.ValidateCommitWithEmail(ctx, commit)
+ ctx.Data["Parents"] = parents
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+ }, nil); err != nil {
+ ctx.ServerError("CalculateTrustStatus", err)
+ return
+ }
+
+ note := &git.Note{}
+ err = git.GetNote(ctx, ctx.Repo.GitRepo, commitID, note)
+ if err == nil {
+ ctx.Data["NoteCommit"] = note.Commit
+ ctx.Data["NoteAuthor"] = user_model.ValidateCommitWithEmail(ctx, note.Commit)
+ ctx.Data["NoteRendered"], err = markup.RenderCommitMessage(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: path.Join("commit", util.PathEscapeSegments(commitID)),
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{}))))
+ if err != nil {
+ ctx.ServerError("RenderCommitMessage", err)
+ return
+ }
+ }
+
+ ctx.HTML(http.StatusOK, tplCommitPage)
+}
+
+// RawDiff dumps the diff of the given commit ID to the response writer
+func RawDiff(ctx *context.Context) {
+ var gitRepo *git.Repository
+ if ctx.Data["PageIsWiki"] != nil {
+ wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return
+ }
+ defer wikiRepo.Close()
+ gitRepo = wikiRepo
+ } else {
+ gitRepo = ctx.Repo.GitRepo
+ if gitRepo == nil {
+ ctx.ServerError("GitRepo not open", fmt.Errorf("no open git repo for '%s'", ctx.Repo.Repository.FullName()))
+ return
+ }
+ }
+ if err := git.GetRawDiff(
+ gitRepo,
+ ctx.Params(":sha"),
+ git.RawDiffType(ctx.Params(":ext")),
+ ctx.Resp,
+ ); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff",
+ errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+}
+
+func processGitCommits(ctx *context.Context, gitCommits []*git.Commit) []*git_model.SignCommitWithStatuses {
+ commits := git_model.ConvertFromGitCommit(ctx, gitCommits, ctx.Repo.Repository)
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ for _, commit := range commits {
+ if commit.Status == nil {
+ continue
+ }
+ commit.Status.HideActionsURL(ctx)
+ git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
+ }
+ }
+ return commits
+}
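
In Graph, any requested branch name that starts with "--" is prefixed with refs/heads/ before it reaches git, so a crafted value cannot be interpreted as a command-line flag. A small sketch of that sanitization step in isolation, with branchPrefix assumed to equal git.BranchPrefix:

package main

import (
	"fmt"
	"strings"
)

const branchPrefix = "refs/heads/" // assumed to match git.BranchPrefix

// sanitizeBranches mirrors the loop in Graph: copy the requested names and
// neutralize anything that could look like a git option.
func sanitizeBranches(requested []string) []string {
	real := make([]string, len(requested))
	copy(real, requested)
	for i, b := range real {
		if strings.HasPrefix(b, "--") {
			real[i] = branchPrefix + b
		}
	}
	return real
}

func main() {
	fmt.Println(sanitizeBranches([]string{"main", "--all", "release/v9"}))
	// Output: [main refs/heads/--all release/v9]
}
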
diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go
new file mode 100644
index 0000000..38d6004
--- /dev/null
+++ b/routers/web/repo/compare.go
@@ -0,0 +1,972 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bufio"
+ gocontext "context"
+ "encoding/csv"
+ "errors"
+ "fmt"
+ "html"
+ "io"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ csv_module "code.gitea.io/gitea/modules/csv"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+const (
+ tplCompare base.TplName = "repo/diff/compare"
+ tplBlobExcerpt base.TplName = "repo/diff/blob_excerpt"
+ tplDiffBox base.TplName = "repo/diff/box"
+)
+
+// setCompareContext sets context data.
+func setCompareContext(ctx *context.Context, before, head *git.Commit, headOwner, headName string) {
+ ctx.Data["BeforeCommit"] = before
+ ctx.Data["HeadCommit"] = head
+
+ ctx.Data["GetBlobByPathForCommit"] = func(commit *git.Commit, path string) *git.Blob {
+ if commit == nil {
+ return nil
+ }
+
+ blob, err := commit.GetBlobByPath(path)
+ if err != nil {
+ return nil
+ }
+ return blob
+ }
+
+ ctx.Data["GetSniffedTypeForBlob"] = func(blob *git.Blob) typesniffer.SniffedType {
+ st := typesniffer.SniffedType{}
+
+ if blob == nil {
+ return st
+ }
+
+ st, err := blob.GuessContentType()
+ if err != nil {
+ log.Error("GuessContentType failed: %v", err)
+ return st
+ }
+ return st
+ }
+
+ setPathsCompareContext(ctx, before, head, headOwner, headName)
+ setImageCompareContext(ctx)
+ setCsvCompareContext(ctx)
+}
+
+// SourceCommitURL creates a relative URL for a commit in the given repository
+func SourceCommitURL(owner, name string, commit *git.Commit) string {
+ return setting.AppSubURL + "/" + url.PathEscape(owner) + "/" + url.PathEscape(name) + "/src/commit/" + url.PathEscape(commit.ID.String())
+}
+
+// RawCommitURL creates a relative URL for the raw commit in the given repository
+func RawCommitURL(owner, name string, commit *git.Commit) string {
+ return setting.AppSubURL + "/" + url.PathEscape(owner) + "/" + url.PathEscape(name) + "/raw/commit/" + url.PathEscape(commit.ID.String())
+}
+
+// setPathsCompareContext sets context data for source and raw paths
+func setPathsCompareContext(ctx *context.Context, base, head *git.Commit, headOwner, headName string) {
+ ctx.Data["SourcePath"] = SourceCommitURL(headOwner, headName, head)
+ ctx.Data["RawPath"] = RawCommitURL(headOwner, headName, head)
+ if base != nil {
+ ctx.Data["BeforeSourcePath"] = SourceCommitURL(headOwner, headName, base)
+ ctx.Data["BeforeRawPath"] = RawCommitURL(headOwner, headName, base)
+ }
+}
+
+// setImageCompareContext sets context data that is required by image compare template
+func setImageCompareContext(ctx *context.Context) {
+ ctx.Data["IsSniffedTypeAnImage"] = func(st typesniffer.SniffedType) bool {
+ return st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage())
+ }
+}
+
+// setCsvCompareContext sets context data that is required by the CSV compare template
+func setCsvCompareContext(ctx *context.Context) {
+ ctx.Data["IsCsvFile"] = func(diffFile *gitdiff.DiffFile) bool {
+ extension := strings.ToLower(filepath.Ext(diffFile.Name))
+ return extension == ".csv" || extension == ".tsv"
+ }
+
+ type CsvDiffResult struct {
+ Sections []*gitdiff.TableDiffSection
+ Error string
+ }
+
+ ctx.Data["CreateCsvDiff"] = func(diffFile *gitdiff.DiffFile, baseBlob, headBlob *git.Blob) CsvDiffResult {
+ if diffFile == nil {
+ return CsvDiffResult{nil, ""}
+ }
+
+ errTooLarge := errors.New(ctx.Locale.TrString("repo.error.csv.too_large"))
+
+ csvReaderFromCommit := func(ctx *markup.RenderContext, blob *git.Blob) (*csv.Reader, io.Closer, error) {
+ if blob == nil {
+ // It's ok for blob to be nil (file added or deleted)
+ return nil, nil, nil
+ }
+
+ if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < blob.Size() {
+ return nil, nil, errTooLarge
+ }
+
+ reader, err := blob.DataAsync()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ csvReader, err := csv_module.CreateReaderAndDetermineDelimiter(ctx, charset.ToUTF8WithFallbackReader(reader, charset.ConvertOpts{}))
+ return csvReader, reader, err
+ }
+
+ baseReader, baseBlobCloser, err := csvReaderFromCommit(&markup.RenderContext{Ctx: ctx, RelativePath: diffFile.OldName}, baseBlob)
+ if baseBlobCloser != nil {
+ defer baseBlobCloser.Close()
+ }
+ if err != nil {
+ if err == errTooLarge {
+ return CsvDiffResult{nil, err.Error()}
+ }
+ log.Error("error whilst creating csv.Reader from file %s in base commit %s in %s: %v", diffFile.Name, baseBlob.ID.String(), ctx.Repo.Repository.Name, err)
+ return CsvDiffResult{nil, "unable to load file"}
+ }
+
+ headReader, headBlobCloser, err := csvReaderFromCommit(&markup.RenderContext{Ctx: ctx, RelativePath: diffFile.Name}, headBlob)
+ if headBlobCloser != nil {
+ defer headBlobCloser.Close()
+ }
+ if err != nil {
+ if err == errTooLarge {
+ return CsvDiffResult{nil, err.Error()}
+ }
+ log.Error("error whilst creating csv.Reader from file %s in head commit %s in %s: %v", diffFile.Name, headBlob.ID.String(), ctx.Repo.Repository.Name, err)
+ return CsvDiffResult{nil, "unable to load file"}
+ }
+
+ sections, err := gitdiff.CreateCsvDiff(diffFile, baseReader, headReader)
+ if err != nil {
+ errMessage, err := csv_module.FormatError(err, ctx.Locale)
+ if err != nil {
+ log.Error("CreateCsvDiff FormatError failed: %v", err)
+ return CsvDiffResult{nil, "unknown csv diff error"}
+ }
+ return CsvDiffResult{nil, errMessage}
+ }
+ return CsvDiffResult{sections, ""}
+ }
+}
+
+// ParseCompareInfo parses the compare info between two commits in preparation for comparing references
+func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
+ baseRepo := ctx.Repo.Repository
+ ci := &common.CompareInfo{}
+
+ fileOnly := ctx.FormBool("file-only")
+
+ // Get compared branches information
+ // A full compare url is of the form:
+ //
+ // 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch}
+ // 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch}
+ // 3. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch}
+ // 4. /{:baseOwner}/{:baseRepoName}/compare/{:headBranch}
+ // 5. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}:{:headBranch}
+ // 6. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}/{:headRepoName}:{:headBranch}
+ //
+ // Here we obtain the infoPath "{:baseBranch}...[{:headOwner}/{:headRepoName}:]{:headBranch}" as ctx.Params("*")
+ // with the :baseRepo in ctx.Repo.
+ //
+ // Note: Generally :headRepoName is not provided here - we are only passed :headOwner.
+ //
+ // How do we determine the :headRepo?
+ //
+ // 1. If :headOwner is not set then the :headRepo = :baseRepo
+ // 2. If :headOwner is set - then look for the fork of :baseRepo owned by :headOwner
+ // 3. But... :baseRepo could be a fork of :headOwner's repo - so check that
+ // 4. Now, :baseRepo and :headRepos could be forks of the same repo - so check that
+ //
+ // format: <base branch>...[<head repo>:]<head branch>
+ // base<-head: master...head:feature
+ // same repo: master...feature
+
+ var (
+ isSameRepo bool
+ infoPath string
+ err error
+ )
+
+ infoPath = ctx.Params("*")
+ var infos []string
+ if infoPath == "" {
+ infos = []string{baseRepo.DefaultBranch, baseRepo.DefaultBranch}
+ } else {
+ infos = strings.SplitN(infoPath, "...", 2)
+ if len(infos) != 2 {
+ if infos = strings.SplitN(infoPath, "..", 2); len(infos) == 2 {
+ ci.DirectComparison = true
+ ctx.Data["PageIsComparePull"] = false
+ } else {
+ infos = []string{baseRepo.DefaultBranch, infoPath}
+ }
+ }
+ }
+
+ ctx.Data["BaseName"] = baseRepo.OwnerName
+ ci.BaseBranch = infos[0]
+ ctx.Data["BaseBranch"] = ci.BaseBranch
+
+ // If there is no head repository, it means compare between same repository.
+ headInfos := strings.Split(infos[1], ":")
+ if len(headInfos) == 1 {
+ isSameRepo = true
+ ci.HeadUser = ctx.Repo.Owner
+ ci.HeadBranch = headInfos[0]
+ } else if len(headInfos) == 2 {
+ headInfosSplit := strings.Split(headInfos[0], "/")
+ if len(headInfosSplit) == 1 {
+ ci.HeadUser, err = user_model.GetUserByName(ctx, headInfos[0])
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("GetUserByName", nil)
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return nil
+ }
+ ci.HeadBranch = headInfos[1]
+ isSameRepo = ci.HeadUser.ID == ctx.Repo.Owner.ID
+ if isSameRepo {
+ ci.HeadRepo = baseRepo
+ }
+ } else {
+ ci.HeadRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, headInfosSplit[0], headInfosSplit[1])
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.NotFound("GetRepositoryByOwnerAndName", nil)
+ } else {
+ ctx.ServerError("GetRepositoryByOwnerAndName", err)
+ }
+ return nil
+ }
+ if err := ci.HeadRepo.LoadOwner(ctx); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("GetUserByName", nil)
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return nil
+ }
+ ci.HeadBranch = headInfos[1]
+ ci.HeadUser = ci.HeadRepo.Owner
+ isSameRepo = ci.HeadRepo.ID == ctx.Repo.Repository.ID
+ }
+ } else {
+ ctx.NotFound("CompareAndPullRequest", nil)
+ return nil
+ }
+ ctx.Data["HeadUser"] = ci.HeadUser
+ ctx.Data["HeadBranch"] = ci.HeadBranch
+ ctx.Repo.PullRequest.SameRepo = isSameRepo
+
+ // Check if base branch is valid.
+ baseIsCommit := ctx.Repo.GitRepo.IsCommitExist(ci.BaseBranch)
+ baseIsBranch := ctx.Repo.GitRepo.IsBranchExist(ci.BaseBranch)
+ baseIsTag := ctx.Repo.GitRepo.IsTagExist(ci.BaseBranch)
+
+ if !baseIsCommit && !baseIsBranch && !baseIsTag {
+ // Check if baseBranch is short sha commit hash
+ if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(ci.BaseBranch); baseCommit != nil {
+ ci.BaseBranch = baseCommit.ID.String()
+ ctx.Data["BaseBranch"] = ci.BaseBranch
+ baseIsCommit = true
+ } else if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
+ if isSameRepo {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch))
+ }
+ return nil
+ } else {
+ ctx.NotFound("IsRefExist", nil)
+ return nil
+ }
+ }
+ ctx.Data["BaseIsCommit"] = baseIsCommit
+ ctx.Data["BaseIsBranch"] = baseIsBranch
+ ctx.Data["BaseIsTag"] = baseIsTag
+ ctx.Data["IsPull"] = true
+
+ // Now we have the repository that represents the base
+
+ // The current base and head repositories and branches may not
+ // actually be the intended branches that the user wants to
+ // create a pull-request from - but also determining the head
+ // repo is difficult.
+
+ // We therefore want to offer a few repositories to set as
+ // our base and head
+
+ // 1. First if the baseRepo is a fork get the "RootRepo" it was
+ // forked from
+ var rootRepo *repo_model.Repository
+ if baseRepo.IsFork {
+ err = baseRepo.GetBaseRepo(ctx)
+ if err != nil {
+ if !repo_model.IsErrRepoNotExist(err) {
+ ctx.ServerError("Unable to find root repo", err)
+ return nil
+ }
+ } else {
+ rootRepo = baseRepo.BaseRepo
+ }
+ }
+
+ // 2. Now if the current user is not the owner of the baseRepo,
+ // check if they have a fork of the base repo and offer that as
+ // "OwnForkRepo"
+ var ownForkRepo *repo_model.Repository
+ if ctx.Doer != nil && baseRepo.OwnerID != ctx.Doer.ID {
+ repo := repo_model.GetForkedRepo(ctx, ctx.Doer.ID, baseRepo.ID)
+ if repo != nil {
+ ownForkRepo = repo
+ ctx.Data["OwnForkRepo"] = ownForkRepo
+ }
+ }
+
+ has := ci.HeadRepo != nil
+ // 3. If the base is a forked from "RootRepo" and the owner of
+ // the "RootRepo" is the :headUser - set headRepo to that
+ if !has && rootRepo != nil && rootRepo.OwnerID == ci.HeadUser.ID {
+ ci.HeadRepo = rootRepo
+ has = true
+ }
+
+ // 4. If the ctx.Doer has their own fork of the baseRepo and the headUser is the ctx.Doer
+ // set the headRepo to the ownFork
+ if !has && ownForkRepo != nil && ownForkRepo.OwnerID == ci.HeadUser.ID {
+ ci.HeadRepo = ownForkRepo
+ has = true
+ }
+
+ // 5. If the headOwner has a fork of the baseRepo - use that
+ if !has {
+ ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ID)
+ has = ci.HeadRepo != nil
+ }
+
+ // 6. If the baseRepo is a fork and the headUser has a fork of that use that
+ if !has && baseRepo.IsFork {
+ ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ForkID)
+ has = ci.HeadRepo != nil
+ }
+
+ // 7. Otherwise if we're not the same repo and haven't found a repo give up
+ if !isSameRepo && !has {
+ ctx.Data["PageIsComparePull"] = false
+ }
+
+ // 8. Finally open the git repo
+ if isSameRepo {
+ ci.HeadRepo = ctx.Repo.Repository
+ ci.HeadGitRepo = ctx.Repo.GitRepo
+ } else if has {
+ ci.HeadGitRepo, err = gitrepo.OpenRepository(ctx, ci.HeadRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer ci.HeadGitRepo.Close()
+ } else {
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ ctx.Data["HeadRepo"] = ci.HeadRepo
+ ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository
+
+ // Now we need to assert that the ctx.Doer has permission to read
+ // the baseRepo's code and pulls
+ // (NOT headRepo's)
+ permBase, err := access_model.GetUserRepoPermission(ctx, baseRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+ if !permBase.CanRead(unit.TypeCode) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in baseRepo has Permissions: %-+v",
+ ctx.Doer,
+ baseRepo,
+ permBase)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ // If we're not merging from the same repo:
+ if !isSameRepo {
+ // Assert ctx.Doer has permission to read headRepo's codes
+ permHead, err := access_model.GetUserRepoPermission(ctx, ci.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+ if !permHead.CanRead(unit.TypeCode) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v",
+ ctx.Doer,
+ ci.HeadRepo,
+ permHead)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+ ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode)
+ }
+
+ // If we have a rootRepo and it's different from:
+ // 1. the computed base
+ // 2. the computed head
+ // then get the branches of it
+ if rootRepo != nil &&
+ rootRepo.ID != ci.HeadRepo.ID &&
+ rootRepo.ID != baseRepo.ID {
+ canRead := access_model.CheckRepoUnitUser(ctx, rootRepo, ctx.Doer, unit.TypeCode)
+ if canRead {
+ ctx.Data["RootRepo"] = rootRepo
+ if !fileOnly {
+ branches, tags, err := getBranchesAndTagsForRepo(ctx, rootRepo)
+ if err != nil {
+ ctx.ServerError("GetBranchesForRepo", err)
+ return nil
+ }
+
+ ctx.Data["RootRepoBranches"] = branches
+ ctx.Data["RootRepoTags"] = tags
+ }
+ }
+ }
+
+ // If we have a ownForkRepo and it's different from:
+ // 1. The computed base
+ // 2. The computed head
+ // 3. The rootRepo (if we have one)
+ // then get the branches from it.
+ if ownForkRepo != nil &&
+ ownForkRepo.ID != ci.HeadRepo.ID &&
+ ownForkRepo.ID != baseRepo.ID &&
+ (rootRepo == nil || ownForkRepo.ID != rootRepo.ID) {
+ canRead := access_model.CheckRepoUnitUser(ctx, ownForkRepo, ctx.Doer, unit.TypeCode)
+ if canRead {
+ ctx.Data["OwnForkRepo"] = ownForkRepo
+ if !fileOnly {
+ branches, tags, err := getBranchesAndTagsForRepo(ctx, ownForkRepo)
+ if err != nil {
+ ctx.ServerError("GetBranchesForRepo", err)
+ return nil
+ }
+ ctx.Data["OwnForkRepoBranches"] = branches
+ ctx.Data["OwnForkRepoTags"] = tags
+ }
+ }
+ }
+
+ // Check if head branch is valid.
+ headIsCommit := ci.HeadGitRepo.IsCommitExist(ci.HeadBranch)
+ headIsBranch := ci.HeadGitRepo.IsBranchExist(ci.HeadBranch)
+ headIsTag := ci.HeadGitRepo.IsTagExist(ci.HeadBranch)
+ if !headIsCommit && !headIsBranch && !headIsTag {
+ // Check if headBranch is short sha commit hash
+ if headCommit, _ := ci.HeadGitRepo.GetCommit(ci.HeadBranch); headCommit != nil {
+ ci.HeadBranch = headCommit.ID.String()
+ ctx.Data["HeadBranch"] = ci.HeadBranch
+ headIsCommit = true
+ } else {
+ ctx.NotFound("IsRefExist", nil)
+ return nil
+ }
+ }
+ ctx.Data["HeadIsCommit"] = headIsCommit
+ ctx.Data["HeadIsBranch"] = headIsBranch
+ ctx.Data["HeadIsTag"] = headIsTag
+
+ // Treat as pull request if both references are branches
+ if ctx.Data["PageIsComparePull"] == nil {
+ ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch
+ }
+
+ if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot create/read pull requests in Repo: %-v\nUser in baseRepo has Permissions: %-+v",
+ ctx.Doer,
+ baseRepo,
+ permBase)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ baseBranchRef := ci.BaseBranch
+ if baseIsBranch {
+ baseBranchRef = git.BranchPrefix + ci.BaseBranch
+ } else if baseIsTag {
+ baseBranchRef = git.TagPrefix + ci.BaseBranch
+ }
+ headBranchRef := ci.HeadBranch
+ if headIsBranch {
+ headBranchRef = git.BranchPrefix + ci.HeadBranch
+ } else if headIsTag {
+ headBranchRef = git.TagPrefix + ci.HeadBranch
+ }
+
+ ci.CompareInfo, err = ci.HeadGitRepo.GetCompareInfo(baseRepo.RepoPath(), baseBranchRef, headBranchRef, ci.DirectComparison, fileOnly)
+ if err != nil {
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+ if ci.DirectComparison {
+ ctx.Data["BeforeCommitID"] = ci.CompareInfo.BaseCommitID
+ } else {
+ ctx.Data["BeforeCommitID"] = ci.CompareInfo.MergeBase
+ }
+
+ return ci
+}
+
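
The comment block at the top of ParseCompareInfo lists the URL forms the compare route accepts: the path suffix is split on "..." (or ".." for a direct comparison), and the head side may optionally carry "owner:" or "owner/repo:". A standalone sketch of that splitting, using only string operations; parseComparePath and compareRefs are illustrative names, not the handler's actual helpers:

package main

import (
	"fmt"
	"strings"
)

type compareRefs struct {
	Base, HeadOwner, HeadRepo, HeadBranch string
	Direct                                bool
}

// parseComparePath mimics how ParseCompareInfo splits ctx.Params("*"):
// "<base>...<head>" (or "<base>..<head>" for a direct comparison), where
// <head> may be "branch", "owner:branch" or "owner/repo:branch".
func parseComparePath(infoPath, defaultBranch string) compareRefs {
	r := compareRefs{Base: defaultBranch}
	infos := strings.SplitN(infoPath, "...", 2)
	if len(infos) != 2 {
		if infos = strings.SplitN(infoPath, "..", 2); len(infos) == 2 {
			r.Direct = true
		} else {
			infos = []string{defaultBranch, infoPath}
		}
	}
	r.Base = infos[0]
	head := strings.SplitN(infos[1], ":", 2)
	if len(head) == 1 {
		r.HeadBranch = head[0]
		return r
	}
	ownerRepo := strings.SplitN(head[0], "/", 2)
	r.HeadOwner = ownerRepo[0]
	if len(ownerRepo) == 2 {
		r.HeadRepo = ownerRepo[1]
	}
	r.HeadBranch = head[1]
	return r
}

func main() {
	for _, p := range []string{"main...feature", "main..feature", "main...alice:feature", "main...alice/fork:feature"} {
		fmt.Printf("%-28s => %+v\n", p, parseComparePath(p, "main"))
	}
}
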
+// PrepareCompareDiff renders compare diff page
+func PrepareCompareDiff(
+ ctx *context.Context,
+ ci *common.CompareInfo,
+ whitespaceBehavior git.TrustedCmdArgs,
+) bool {
+ var (
+ repo = ctx.Repo.Repository
+ err error
+ title string
+ )
+
+ // Get diff information.
+ ctx.Data["CommitRepoLink"] = ci.HeadRepo.Link()
+
+ headCommitID := ci.CompareInfo.HeadCommitID
+
+ ctx.Data["AfterCommitID"] = headCommitID
+
+ if (headCommitID == ci.CompareInfo.MergeBase && !ci.DirectComparison) ||
+ headCommitID == ci.CompareInfo.BaseCommitID {
+ ctx.Data["IsNothingToCompare"] = true
+ if unit, err := repo.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+
+ if !config.AutodetectManualMerge {
+ allowEmptyPr := !(ci.BaseBranch == ci.HeadBranch && ctx.Repo.Repository.Name == ci.HeadRepo.Name)
+ ctx.Data["AllowEmptyPr"] = allowEmptyPr
+
+ return !allowEmptyPr
+ }
+
+ ctx.Data["AllowEmptyPr"] = false
+ }
+ return true
+ }
+
+ beforeCommitID := ci.CompareInfo.MergeBase
+ if ci.DirectComparison {
+ beforeCommitID = ci.CompareInfo.BaseCommitID
+ }
+
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
+ if len(files) == 2 || len(files) == 1 {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diff, err := gitdiff.GetDiff(ctx, ci.HeadGitRepo,
+ &gitdiff.DiffOptions{
+ BeforeCommitID: beforeCommitID,
+ AfterCommitID: headCommitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: whitespaceBehavior,
+ DirectComparison: ci.DirectComparison,
+ }, ctx.FormStrings("files")...)
+ if err != nil {
+ ctx.ServerError("GetDiffRangeWithWhitespaceBehavior", err)
+ return false
+ }
+ ctx.Data["Diff"] = diff
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ headCommit, err := ci.HeadGitRepo.GetCommit(headCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return false
+ }
+
+ baseGitRepo := ctx.Repo.GitRepo
+
+ beforeCommit, err := baseGitRepo.GetCommit(beforeCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return false
+ }
+
+ commits := processGitCommits(ctx, ci.CompareInfo.Commits)
+ ctx.Data["Commits"] = commits
+ ctx.Data["CommitCount"] = len(commits)
+
+ if len(commits) == 1 {
+ c := commits[0]
+ title = strings.TrimSpace(c.UserCommit.Summary())
+
+ body := strings.Split(strings.TrimSpace(c.UserCommit.Message()), "\n")
+ if len(body) > 1 {
+ ctx.Data["content"] = strings.Join(body[1:], "\n")
+ }
+ } else {
+ title = ci.HeadBranch
+ }
+ if len(title) > 255 {
+ var trailer string
+ title, trailer = util.SplitStringAtByteN(title, 255)
+ if len(trailer) > 0 {
+ if ctx.Data["content"] != nil {
+ ctx.Data["content"] = fmt.Sprintf("%s\n\n%s", trailer, ctx.Data["content"])
+ } else {
+ ctx.Data["content"] = trailer + "\n"
+ }
+ }
+ }
+
+ ctx.Data["title"] = title
+ ctx.Data["Username"] = ci.HeadUser.Name
+ ctx.Data["Reponame"] = ci.HeadRepo.Name
+
+ setCompareContext(ctx, beforeCommit, headCommit, ci.HeadUser.Name, ci.HeadRepo.Name)
+
+ return false
+}
+
+func getBranchesAndTagsForRepo(ctx gocontext.Context, repo *repo_model.Repository) (branches, tags []string, err error) {
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return nil, nil, err
+ }
+ defer gitRepo.Close()
+
+ branches, err = git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: repo.ID,
+ ListOptions: db.ListOptionsAll,
+ IsDeletedBranch: optional.Some(false),
+ })
+ if err != nil {
+ return nil, nil, err
+ }
+ tags, err = gitRepo.GetTags(0, 0)
+ if err != nil {
+ return nil, nil, err
+ }
+ return branches, tags, nil
+}
+
+// CompareDiff shows the diff from one commit to another
+func CompareDiff(ctx *context.Context) {
+ ci := ParseCompareInfo(ctx)
+ defer func() {
+ if ci != nil && ci.HeadGitRepo != nil {
+ ci.HeadGitRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["DirectComparison"] = ci.DirectComparison
+ ctx.Data["OtherCompareSeparator"] = ".."
+ ctx.Data["CompareSeparator"] = "..."
+ if ci.DirectComparison {
+ ctx.Data["CompareSeparator"] = ".."
+ ctx.Data["OtherCompareSeparator"] = "..."
+ }
+
+ nothingToCompare := PrepareCompareDiff(ctx, ci,
+ gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
+ if ctx.Written() {
+ return
+ }
+
+ baseTags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = baseTags
+
+ fileOnly := ctx.FormBool("file-only")
+ if fileOnly {
+ ctx.HTML(http.StatusOK, tplDiffBox)
+ return
+ }
+
+ headBranches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: ci.HeadRepo.ID,
+ ListOptions: db.ListOptionsAll,
+ IsDeletedBranch: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("GetBranches", err)
+ return
+ }
+ ctx.Data["HeadBranches"] = headBranches
+
+ // For compare repo branches
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ headTags, err := repo_model.GetTagNamesByRepoID(ctx, ci.HeadRepo.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["HeadTags"] = headTags
+
+ if ctx.Data["PageIsComparePull"] == true {
+ pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, ctx.Repo.Repository.ID, ci.HeadBranch, ci.BaseBranch, issues_model.PullRequestFlowGithub)
+ if err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ ctx.ServerError("GetUnmergedPullRequest", err)
+ return
+ }
+ } else {
+ ctx.Data["HasPullRequest"] = true
+ if err := pr.LoadIssue(ctx); err != nil {
+ ctx.ServerError("LoadIssue", err)
+ return
+ }
+ ctx.Data["PullRequest"] = pr
+ ctx.HTML(http.StatusOK, tplCompareDiff)
+ return
+ }
+
+ if !nothingToCompare {
+ // Setup information for new form.
+ RetrieveRepoMetas(ctx, ctx.Repo.Repository, true)
+ if ctx.Written() {
+ return
+ }
+ }
+ }
+ beforeCommitID := ctx.Data["BeforeCommitID"].(string)
+ afterCommitID := ctx.Data["AfterCommitID"].(string)
+
+ separator := "..."
+ if ci.DirectComparison {
+ separator = ".."
+ }
+ ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + separator + base.ShortSha(afterCommitID)
+
+ ctx.Data["IsDiffCompare"] = true
+ _, templateErrs := setTemplateIfExists(ctx, pullRequestTemplateKey, pullRequestTemplateCandidates)
+
+ if len(templateErrs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, templateErrs), true)
+ }
+
+ if content, ok := ctx.Data["content"].(string); ok && content != "" {
+ // If template content is set, prepend the "content". That is only
+ // applicable when there is one commit to compare and that commit has a message;
+ // in that case the commit message will be prepended to the template body.
+ if templateContent, ok := ctx.Data[pullRequestTemplateKey].(string); ok && templateContent != "" {
+			// Reuse the same key, as it is prioritized over the "content" key.
+			// Add two newlines between the two parts to ensure there is always at least
+			// one empty line between them.
+ ctx.Data[pullRequestTemplateKey] = content + "\n\n" + templateContent
+ }
+
+ // When using form fields, also add content to field with id "body".
+ if fields, ok := ctx.Data["Fields"].([]*api.IssueFormField); ok {
+ for _, field := range fields {
+ if field.ID == "body" {
+ if fieldValue, ok := field.Attributes["value"].(string); ok && fieldValue != "" {
+ field.Attributes["value"] = content + "\n\n" + fieldValue
+ } else {
+ field.Attributes["value"] = content
+ }
+ }
+ }
+ }
+ }
+
+ ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanWrite(unit.TypeProjects)
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypePullRequests)
+
+ if unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+ ctx.Data["AllowMaintainerEdit"] = config.DefaultAllowMaintainerEdit
+ } else {
+ ctx.Data["AllowMaintainerEdit"] = false
+ }
+
+ ctx.HTML(http.StatusOK, tplCompare)
+}
+
+// ExcerptBlob renders the contents of a blob excerpt
+func ExcerptBlob(ctx *context.Context) {
+ commitID := ctx.Params("sha")
+ lastLeft := ctx.FormInt("last_left")
+ lastRight := ctx.FormInt("last_right")
+ idxLeft := ctx.FormInt("left")
+ idxRight := ctx.FormInt("right")
+ leftHunkSize := ctx.FormInt("left_hunk_size")
+ rightHunkSize := ctx.FormInt("right_hunk_size")
+ anchor := ctx.FormString("anchor")
+ direction := ctx.FormString("direction")
+ filePath := ctx.FormString("path")
+ gitRepo := ctx.Repo.GitRepo
+ if ctx.FormBool("wiki") {
+ var err error
+ gitRepo, err = gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return
+ }
+ defer gitRepo.Close()
+ }
+ chunkSize := gitdiff.BlobExcerptChunkSize
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetCommit")
+ return
+ }
+ section := &gitdiff.DiffSection{
+ FileName: filePath,
+ Name: filePath,
+ }
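+	// "up"/"down" indicate which way the user is expanding the collapsed region. When more than
+	// chunkSize hidden lines remain, only the next chunkSize lines in that direction are loaded
+	// and the remaining hunk header is kept; otherwise the whole remaining gap is rendered and
+	// the hunk header is dropped.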
+ if direction == "up" && (idxLeft-lastLeft) > chunkSize {
+ idxLeft -= chunkSize
+ idxRight -= chunkSize
+ leftHunkSize += chunkSize
+ rightHunkSize += chunkSize
+ section.Lines, err = getExcerptLines(commit, filePath, idxLeft-1, idxRight-1, chunkSize)
+ } else if direction == "down" && (idxLeft-lastLeft) > chunkSize {
+ section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, chunkSize)
+ lastLeft += chunkSize
+ lastRight += chunkSize
+ } else {
+ offset := -1
+ if direction == "down" {
+ offset = 0
+ }
+ section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, idxRight-lastRight+offset)
+ leftHunkSize = 0
+ rightHunkSize = 0
+ idxLeft = lastLeft
+ idxRight = lastRight
+ }
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "getExcerptLines")
+ return
+ }
+ if idxRight > lastRight {
+ lineText := " "
+ if rightHunkSize > 0 || leftHunkSize > 0 {
+ lineText = fmt.Sprintf("@@ -%d,%d +%d,%d @@\n", idxLeft, leftHunkSize, idxRight, rightHunkSize)
+ }
+ lineText = html.EscapeString(lineText)
+ lineSection := &gitdiff.DiffLine{
+ Type: gitdiff.DiffLineSection,
+ Content: lineText,
+ SectionInfo: &gitdiff.DiffLineSectionInfo{
+ Path: filePath,
+ LastLeftIdx: lastLeft,
+ LastRightIdx: lastRight,
+ LeftIdx: idxLeft,
+ RightIdx: idxRight,
+ LeftHunkSize: leftHunkSize,
+ RightHunkSize: rightHunkSize,
+ },
+ }
+ if direction == "up" {
+ section.Lines = append([]*gitdiff.DiffLine{lineSection}, section.Lines...)
+ } else if direction == "down" {
+ section.Lines = append(section.Lines, lineSection)
+ }
+ }
+ ctx.Data["section"] = section
+ ctx.Data["FileNameHash"] = git.HashFilePathForWebUI(filePath)
+ ctx.Data["AfterCommitID"] = commitID
+ ctx.Data["Anchor"] = anchor
+ ctx.HTML(http.StatusOK, tplBlobExcerpt)
+}
+
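+// getExcerptLines reads the blob at filePath in the given commit and returns up to chunkSize
+// plain diff lines starting after line idxRight (0-based), with the left-hand line numbers
+// counted from idxLeft.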
+func getExcerptLines(commit *git.Commit, filePath string, idxLeft, idxRight, chunkSize int) ([]*gitdiff.DiffLine, error) {
+ blob, err := commit.Tree.GetBlobByPath(filePath)
+ if err != nil {
+ return nil, err
+ }
+ reader, err := blob.DataAsync()
+ if err != nil {
+ return nil, err
+ }
+ defer reader.Close()
+ scanner := bufio.NewScanner(reader)
+ var diffLines []*gitdiff.DiffLine
+ for line := 0; line < idxRight+chunkSize; line++ {
+ if ok := scanner.Scan(); !ok {
+ break
+ }
+ if line < idxRight {
+ continue
+ }
+ lineText := scanner.Text()
+ diffLine := &gitdiff.DiffLine{
+ LeftIdx: idxLeft + (line - idxRight) + 1,
+ RightIdx: line + 1,
+ Type: gitdiff.DiffLinePlain,
+ Content: " " + lineText,
+ }
+ diffLines = append(diffLines, diffLine)
+ }
+ if err = scanner.Err(); err != nil {
+ return nil, fmt.Errorf("getExcerptLines scan: %w", err)
+ }
+ return diffLines, nil
+}
diff --git a/routers/web/repo/contributors.go b/routers/web/repo/contributors.go
new file mode 100644
index 0000000..762fbf9
--- /dev/null
+++ b/routers/web/repo/contributors.go
@@ -0,0 +1,38 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplContributors base.TplName = "repo/activity"
+)
+
+// Contributors renders the page that shows the repository contributors graph
+func Contributors(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.contributors")
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsContributors"] = true
+ ctx.HTML(http.StatusOK, tplContributors)
+}
+
+// ContributorsData renders JSON of contributors along with their weekly commit statistics
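+// While the statistics are still being generated the endpoint responds with "202 Accepted",
+// so callers can poll until they receive "200 OK".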
+func ContributorsData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("GetContributorStats", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats)
+ }
+}
diff --git a/routers/web/repo/download.go b/routers/web/repo/download.go
new file mode 100644
index 0000000..c4a8bae
--- /dev/null
+++ b/routers/web/repo/download.go
@@ -0,0 +1,170 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "path"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+)
+
+// ServeBlobOrLFS serves a git.Blob for download, redirecting to LFS if necessary
+func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob, lastModified *time.Time) error {
+ if httpcache.HandleGenericETagTimeCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) {
+ return nil
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ return err
+ }
+ closed := false
+ defer func() {
+ if closed {
+ return
+ }
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ }()
+
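+	// A Git LFS pointer is a small text stub committed in place of the real content. If the blob
+	// parses as a valid pointer and a matching meta object exists for this repository, the actual
+	// content is served from LFS storage instead of the blob itself.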
+ pointer, _ := lfs.ReadPointer(dataRc)
+ if pointer.IsValid() {
+ meta, _ := git_model.GetLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, pointer.Oid)
+ if meta == nil {
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ closed = true
+ return common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified)
+ }
+ if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+pointer.Oid+`"`) {
+ return nil
+ }
+
+ if setting.LFS.Storage.MinioConfig.ServeDirect {
+			// If we have a signed URL (S3, object storage), redirect to it directly.
+ u, err := storage.LFS.URL(pointer.RelativePath(), blob.Name())
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return nil
+ }
+ }
+
+ lfsDataRc, err := lfs.ReadMetaObject(meta.Pointer)
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err = lfsDataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ }()
+ common.ServeContentByReadSeeker(ctx.Base, ctx.Repo.TreePath, lastModified, lfsDataRc)
+ return nil
+ }
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ closed = true
+
+ return common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified)
+}
+
+func getBlobForEntry(ctx *context.Context) (blob *git.Blob, lastModified *time.Time) {
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetTreeEntryByPath", err)
+ } else {
+ ctx.ServerError("GetTreeEntryByPath", err)
+ }
+ return nil, nil
+ }
+
+ if entry.IsDir() || entry.IsSubModule() {
+ ctx.NotFound("getBlobForEntry", nil)
+ return nil, nil
+ }
+
+ info, _, err := git.Entries([]*git.TreeEntry{entry}).GetCommitsInfo(ctx, ctx.Repo.Commit, path.Dir("/" + ctx.Repo.TreePath)[1:])
+ if err != nil {
+ ctx.ServerError("GetCommitsInfo", err)
+ return nil, nil
+ }
+
+ if len(info) == 1 {
+		// The commit time is used as the Last-Modified value for HTTP caching ("304 Not Modified").
+ lastModified = &info[0].Commit.Committer.When
+ }
+ blob = entry.Blob()
+
+ return blob, lastModified
+}
+
+// SingleDownload serves a single file download by repo path
+func SingleDownload(ctx *context.Context) {
+ blob, lastModified := getBlobForEntry(ctx)
+ if blob == nil {
+ return
+ }
+
+ if err := common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
+
+// SingleDownloadOrLFS serves a single file download by repo path, redirecting to LFS if necessary
+func SingleDownloadOrLFS(ctx *context.Context) {
+ blob, lastModified := getBlobForEntry(ctx)
+ if blob == nil {
+ return
+ }
+
+ if err := ServeBlobOrLFS(ctx, blob, lastModified); err != nil {
+ ctx.ServerError("ServeBlobOrLFS", err)
+ }
+}
+
+// DownloadByID serves a file download by its SHA-1 ID
+func DownloadByID(ctx *context.Context) {
+ blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlob", nil)
+ } else {
+ ctx.ServerError("GetBlob", err)
+ }
+ return
+ }
+ if err = common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
+
+// DownloadByIDOrLFS serves a file download by its SHA-1 ID, taking LFS into account
+func DownloadByIDOrLFS(ctx *context.Context) {
+ blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlob", nil)
+ } else {
+ ctx.ServerError("GetBlob", err)
+ }
+ return
+ }
+ if err = ServeBlobOrLFS(ctx, blob, nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
diff --git a/routers/web/repo/editor.go b/routers/web/repo/editor.go
new file mode 100644
index 0000000..00c3d88
--- /dev/null
+++ b/routers/web/repo/editor.go
@@ -0,0 +1,962 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ files_service "code.gitea.io/gitea/services/repository/files"
+)
+
+const (
+ tplEditFile base.TplName = "repo/editor/edit"
+ tplEditDiffPreview base.TplName = "repo/editor/diff_preview"
+ tplDeleteFile base.TplName = "repo/editor/delete"
+ tplUploadFile base.TplName = "repo/editor/upload"
+
+ frmCommitChoiceDirect string = "direct"
+ frmCommitChoiceNewBranch string = "commit-to-new-branch"
+)
+
+func canCreateBasePullRequest(ctx *context.Context) bool {
+ baseRepo := ctx.Repo.Repository.BaseRepo
+ return baseRepo != nil && baseRepo.UnitEnabled(ctx, unit.TypePullRequests)
+}
+
+func renderCommitRights(ctx *context.Context) bool {
+ canCommitToBranch, err := ctx.Repo.CanCommitToBranch(ctx, ctx.Doer)
+ if err != nil {
+ log.Error("CanCommitToBranch: %v", err)
+ }
+ ctx.Data["CanCommitToBranch"] = canCommitToBranch
+ ctx.Data["CanCreatePullRequest"] = ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) || canCreateBasePullRequest(ctx)
+
+ return canCommitToBranch.CanCommitToBranch
+}
+
+// redirectForCommitChoice redirects after committing the edit to a branch
+func redirectForCommitChoice(ctx *context.Context, commitChoice, newBranchName, treePath string) {
+ if commitChoice == frmCommitChoiceNewBranch {
+ // Redirect to a pull request when possible
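+		// e.g. the user ends up on a compare URL such as
+		// <repo>/compare/<base branch>...<new branch> (or ...owner/repo:<new branch> for forks)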
+ redirectToPullRequest := false
+ repo := ctx.Repo.Repository
+ baseBranch := ctx.Repo.BranchName
+ headBranch := newBranchName
+ if repo.UnitEnabled(ctx, unit.TypePullRequests) {
+ redirectToPullRequest = true
+ } else if canCreateBasePullRequest(ctx) {
+ redirectToPullRequest = true
+ baseBranch = repo.BaseRepo.DefaultBranch
+ headBranch = repo.Owner.Name + "/" + repo.Name + ":" + headBranch
+ repo = repo.BaseRepo
+ }
+
+ if redirectToPullRequest {
+ ctx.Redirect(repo.Link() + "/compare/" + util.PathEscapeSegments(baseBranch) + "..." + util.PathEscapeSegments(headBranch))
+ return
+ }
+ }
+
+ // Redirect to viewing file or folder
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(newBranchName) + "/" + util.PathEscapeSegments(treePath))
+}
+
+// getParentTreeFields returns the list of parent tree names and the corresponding tree paths
+// for the given tree path.
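+// For example, "a/b/file.go" yields treeNames ["a", "b", "file.go"] and
+// treePaths ["a", "a/b", "a/b/file.go"].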
+func getParentTreeFields(treePath string) (treeNames, treePaths []string) {
+ if len(treePath) == 0 {
+ return treeNames, treePaths
+ }
+
+ treeNames = strings.Split(treePath, "/")
+ treePaths = make([]string, len(treeNames))
+ for i := range treeNames {
+ treePaths[i] = strings.Join(treeNames[:i+1], "/")
+ }
+ return treeNames, treePaths
+}
+
+// getSelectableEmailAddresses returns the email addresses that the user can use
+// as the email of a Git committer.
+func getSelectableEmailAddresses(ctx *context.Context) ([]*user_model.ActivatedEmailAddress, error) {
+	// Retrieve emails that the user could use for the committer identity.
+ commitEmails, err := user_model.GetActivatedEmailAddresses(ctx, ctx.Doer.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetActivatedEmailAddresses: %w", err)
+ }
+
+	// Allow the placeholder mail to be used. Use -1 as the ID to identify
+	// this entry as the user's placeholder mail.
+ placeholderMail := &user_model.ActivatedEmailAddress{ID: -1, Email: ctx.Doer.GetPlaceholderEmail()}
+ if ctx.Doer.KeepEmailPrivate {
+ commitEmails = append([]*user_model.ActivatedEmailAddress{placeholderMail}, commitEmails...)
+ } else {
+ commitEmails = append(commitEmails, placeholderMail)
+ }
+
+ return commitEmails, nil
+}
+
+// CommonEditorData sets common context data that is used by the editor.
+func CommonEditorData(ctx *context.Context) {
+ // Set context for selectable email addresses.
+ commitEmails, err := getSelectableEmailAddresses(ctx)
+ if err != nil {
+ ctx.ServerError("getSelectableEmailAddresses", err)
+ return
+ }
+ ctx.Data["CommitMails"] = commitEmails
+ ctx.Data["DefaultCommitMail"] = ctx.Doer.GetEmail()
+}
+
+func editFile(ctx *context.Context, isNewFile bool) {
+ ctx.Data["PageIsEdit"] = true
+ ctx.Data["IsNewFile"] = isNewFile
+ canCommit := renderCommitRights(ctx)
+
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if treePath != ctx.Repo.TreePath {
+ if isNewFile {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_new", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ } else {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_edit", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ }
+ return
+ }
+
+ // Check if the filename (and additional path) is specified in the querystring
+ // (filename is a misnomer, but kept for compatibility with GitHub)
+ filePath, fileName := path.Split(ctx.Req.URL.Query().Get("filename"))
+ filePath = strings.Trim(filePath, "/")
+ treeNames, treePaths := getParentTreeFields(path.Join(ctx.Repo.TreePath, filePath))
+
+ if !isNewFile {
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+ // No way to edit a directory online.
+ if entry.IsDir() {
+ ctx.NotFound("entry.IsDir", nil)
+ return
+ }
+
+ blob := entry.Blob()
+ if blob.Size() >= setting.UI.MaxDisplayFileSize {
+ ctx.NotFound("blob.Size", err)
+ return
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ ctx.NotFound("blob.Data", err)
+ return
+ }
+
+ defer dataRc.Close()
+
+ ctx.Data["FileSize"] = blob.Size()
+ ctx.Data["FileName"] = blob.Name()
+
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ // Only some file types are editable online as text.
+ if !typesniffer.DetectContentType(buf).IsRepresentableAsText() {
+ ctx.NotFound("typesniffer.IsRepresentableAsText", nil)
+ return
+ }
+
+ d, _ := io.ReadAll(dataRc)
+
+ buf = append(buf, d...)
+ if content, err := charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true}); err != nil {
+ log.Error("ToUTF8: %v", err)
+ ctx.Data["FileContent"] = string(buf)
+ } else {
+ ctx.Data["FileContent"] = content
+ }
+ } else {
+		// Append the filename from the query, or an empty string to let the user name the new file.
+ treeNames = append(treeNames, fileName)
+ }
+
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["PreviewableExtensions"] = strings.Join(markup.PreviewableExtensions(), ",")
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["EditorconfigJson"] = GetEditorConfig(ctx, treePath)
+
+ ctx.HTML(http.StatusOK, tplEditFile)
+}
+
+// GetEditorConfig returns an editorconfig JSON string for the given treePath, or "null"
+func GetEditorConfig(ctx *context.Context, treePath string) string {
+ ec, _, err := ctx.Repo.GetEditorconfig()
+ if err == nil {
+ def, err := ec.GetDefinitionForFilename(treePath)
+ if err == nil {
+ jsonStr, _ := json.Marshal(def)
+ return string(jsonStr)
+ }
+ }
+ return "null"
+}
+
+// EditFile render edit file page
+func EditFile(ctx *context.Context) {
+ editFile(ctx, false)
+}
+
+// NewFile render create file page
+func NewFile(ctx *context.Context) {
+ editFile(ctx, true)
+}
+
+func editFilePost(ctx *context.Context, form forms.EditRepoFileForm, isNewFile bool) {
+ canCommit := renderCommitRights(ctx)
+ treeNames, treePaths := getParentTreeFields(form.TreePath)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ ctx.Data["PageIsEdit"] = true
+ ctx.Data["PageHasPosted"] = true
+ ctx.Data["IsNewFile"] = isNewFile
+ ctx.Data["TreePath"] = form.TreePath
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ ctx.Data["FileContent"] = form.Content
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["PreviewableExtensions"] = strings.Join(markup.PreviewableExtensions(), ",")
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["EditorconfigJson"] = GetEditorConfig(ctx, form.TreePath)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplEditFile)
+ return
+ }
+
+	// Cannot commit to an existing branch if the user doesn't have rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplEditFile, &form)
+ return
+ }
+
+	// CommitSummary is optional in the web form; if empty, give it a default message based on add or update.
+	// `message` will be both the summary and the commit message combined.
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ if isNewFile {
+ message = ctx.Locale.TrString("repo.editor.add", form.TreePath)
+ } else {
+ message = ctx.Locale.TrString("repo.editor.update", form.TreePath)
+ }
+ }
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ operation := "update"
+ if isNewFile {
+ operation = "create"
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplEditFile, form)
+ if ctx.Written() {
+ return
+ }
+
+ if _, err := files_service.ChangeRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.ChangeRepoFilesOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Files: []*files_service.ChangeRepoFile{
+ {
+ Operation: operation,
+ FromTreePath: ctx.Repo.TreePath,
+ TreePath: form.TreePath,
+ ContentReader: strings.NewReader(strings.ReplaceAll(form.Content, "\r", "")),
+ },
+ },
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+ // This is where we handle all the errors thrown by files_service.ChangeRepoFiles
+ if git.IsErrNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_editing_no_longer_exists", ctx.Repo.TreePath), tplEditFile, &form)
+ } else if git_model.IsErrLFSFileLocked(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.upload_file_is_locked", err.(git_model.ErrLFSFileLocked).Path, err.(git_model.ErrLFSFileLocked).UserName), tplEditFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", form.TreePath), tplEditFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ if fileErr, ok := err.(models.ErrFilePathInvalid); ok {
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplEditFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplEditFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplEditFile, &form)
+ default:
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrRepoFileAlreadyExists(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_already_exists", form.TreePath), tplEditFile, &form)
+ } else if git.IsErrBranchNotExist(err) {
+		// For when a user adds/updates a file on a branch that no longer exists
+ if branchErr, ok := err.(git.ErrBranchNotExist); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplEditFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ ctx.Data["Err_NewBranchName"] = true
+ if branchErr, ok := err.(git_model.ErrBranchAlreadyExists); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplEditFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.commit_id_not_matching"), tplEditFile, &form)
+ } else if git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_out_of_date"), tplEditFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplEditFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("editFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplEditFile, &form)
+ }
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.fail_to_update_file", form.TreePath),
+ "Summary": ctx.Tr("repo.editor.fail_to_update_file_summary"),
+ "Details": utils.SanitizeFlashErrorString(err.Error()),
+ })
+ if err != nil {
+ ctx.ServerError("editFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplEditFile, &form)
+ }
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
+ _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
+}
+
+// EditFilePost response for editing file
+func EditFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+ editFilePost(ctx, *form, false)
+}
+
+// NewFilePost response for creating file
+func NewFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+ editFilePost(ctx, *form, true)
+}
+
+// DiffPreviewPost render preview diff page
+func DiffPreviewPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditPreviewDiffForm)
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if len(treePath) == 0 {
+ ctx.Error(http.StatusInternalServerError, "file name to diff is invalid")
+ return
+ }
+
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(treePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetTreeEntryByPath: "+err.Error())
+ return
+ } else if entry.IsDir() {
+ ctx.Error(http.StatusUnprocessableEntity)
+ return
+ }
+
+ diff, err := files_service.GetDiffPreview(ctx, ctx.Repo.Repository, ctx.Repo.BranchName, treePath, form.Content)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetDiffPreview: "+err.Error())
+ return
+ }
+
+ if diff.NumFiles == 0 {
+ ctx.PlainText(http.StatusOK, ctx.Locale.TrString("repo.editor.no_changes_to_show"))
+ return
+ }
+ ctx.Data["File"] = diff.Files[0]
+
+ ctx.HTML(http.StatusOK, tplEditDiffPreview)
+}
+
+// DeleteFile render delete file page
+func DeleteFile(ctx *context.Context) {
+ ctx.Data["PageIsDelete"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+
+ if treePath != ctx.Repo.TreePath {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_delete", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ return
+ }
+
+ ctx.Data["TreePath"] = treePath
+ canCommit := renderCommitRights(ctx)
+
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+
+ ctx.HTML(http.StatusOK, tplDeleteFile)
+}
+
+// DeleteFilePost response for deleting file
+func DeleteFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.DeleteRepoFileForm)
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ ctx.Data["PageIsDelete"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["TreePath"] = ctx.Repo.TreePath
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplDeleteFile)
+ return
+ }
+
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplDeleteFile, &form)
+ return
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ message = ctx.Locale.TrString("repo.editor.delete", ctx.Repo.TreePath)
+ }
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplDeleteFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ if _, err := files_service.ChangeRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.ChangeRepoFilesOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Files: []*files_service.ChangeRepoFile{
+ {
+ Operation: "delete",
+ TreePath: ctx.Repo.TreePath,
+ },
+ },
+ Message: message,
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+		// This is where we handle all the errors thrown by files_service.ChangeRepoFiles
+ if git.IsErrNotExist(err) || models.IsErrRepoFileDoesNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_deleting_no_longer_exists", ctx.Repo.TreePath), tplDeleteFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", ctx.Repo.TreePath), tplDeleteFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ if fileErr, ok := err.(models.ErrFilePathInvalid); ok {
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplDeleteFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplDeleteFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplDeleteFile, &form)
+ default:
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ } else {
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ } else if git.IsErrBranchNotExist(err) {
+		// For when a user deletes a file from a branch that no longer exists
+ if branchErr, ok := err.(git.ErrBranchNotExist); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplDeleteFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ if branchErr, ok := err.(git_model.ErrBranchAlreadyExists); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplDeleteFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrCommitIDDoesNotMatch(err) || git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_deleting", ctx.Repo.RepoLink+"/compare/"+util.PathEscapeSegments(form.LastCommit)+"..."+util.PathEscapeSegments(ctx.Repo.CommitID)), tplDeleteFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplDeleteFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("DeleteFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplDeleteFile, &form)
+ }
+ } else {
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.editor.file_delete_success", ctx.Repo.TreePath))
+ treePath := path.Dir(ctx.Repo.TreePath)
+ if treePath == "." {
+ treePath = "" // the file deleted was in the root, so we return the user to the root directory
+ }
+ if len(treePath) > 0 {
+ // Need to get the latest commit since it changed
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
+ if err == nil && commit != nil {
+			// We have the commit, now find which directory we can return the user to
+ // (must have entries)
+ treePath = GetClosestParentWithFiles(treePath, commit)
+ } else {
+ treePath = "" // otherwise return them to the root of the repo
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, treePath)
+}
+
+// UploadFile render upload file page
+func UploadFile(ctx *context.Context) {
+ ctx.Data["PageIsUpload"] = true
+ upload.AddUploadContext(ctx, "repo")
+ canCommit := renderCommitRights(ctx)
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if treePath != ctx.Repo.TreePath {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_upload", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ return
+ }
+ ctx.Repo.TreePath = treePath
+
+ treeNames, treePaths := getParentTreeFields(ctx.Repo.TreePath)
+ if len(treeNames) == 0 {
+ // We must at least have one element for user to input.
+ treeNames = []string{""}
+ }
+
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+
+ ctx.HTML(http.StatusOK, tplUploadFile)
+}
+
+// UploadFilePost response for uploading file
+func UploadFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UploadRepoFileForm)
+ ctx.Data["PageIsUpload"] = true
+ upload.AddUploadContext(ctx, "repo")
+ canCommit := renderCommitRights(ctx)
+
+ oldBranchName := ctx.Repo.BranchName
+ branchName := oldBranchName
+
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ form.TreePath = cleanUploadFileName(form.TreePath)
+
+ treeNames, treePaths := getParentTreeFields(form.TreePath)
+ if len(treeNames) == 0 {
+ // We must at least have one element for user to input.
+ treeNames = []string{""}
+ }
+
+ ctx.Data["TreePath"] = form.TreePath
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName)
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = branchName
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplUploadFile)
+ return
+ }
+
+ if oldBranchName != branchName {
+ if _, err := ctx.Repo.GitRepo.GetBranch(branchName); err == nil {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchName), tplUploadFile, &form)
+ return
+ }
+ } else if !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplUploadFile, &form)
+ return
+ }
+
+ if !ctx.Repo.Repository.IsEmpty {
+ var newTreePath string
+ for _, part := range treeNames {
+ newTreePath = path.Join(newTreePath, part)
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(newTreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ break // Means there is no item with that name, so we're good
+ }
+ ctx.ServerError("Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+			// Users can only upload files into a directory; the directory name must not be an existing file.
+ if !entry.IsDir() {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", part), tplUploadFile, &form)
+ return
+ }
+ }
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ dir := form.TreePath
+ if dir == "" {
+ dir = "/"
+ }
+ message = ctx.Locale.TrString("repo.editor.upload_files_to_dir", dir)
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplUploadFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ if err := files_service.UploadRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.UploadRepoFileOptions{
+ LastCommitID: ctx.Repo.CommitID,
+ OldBranch: oldBranchName,
+ NewBranch: branchName,
+ TreePath: form.TreePath,
+ Message: message,
+ Files: form.Files,
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+ if git_model.IsErrLFSFileLocked(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.upload_file_is_locked", err.(git_model.ErrLFSFileLocked).Path, err.(git_model.ErrLFSFileLocked).UserName), tplUploadFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", form.TreePath), tplUploadFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ fileErr := err.(models.ErrFilePathInvalid)
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplUploadFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplUploadFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplUploadFile, &form)
+ default:
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrRepoFileAlreadyExists(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_already_exists", form.TreePath), tplUploadFile, &form)
+ } else if git.IsErrBranchNotExist(err) {
+ branchErr := err.(git.ErrBranchNotExist)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplUploadFile, &form)
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ ctx.Data["Err_NewBranchName"] = true
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplUploadFile, &form)
+ } else if git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+util.PathEscapeSegments(ctx.Repo.CommitID)+"..."+util.PathEscapeSegments(form.NewBranchName)), tplUploadFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplUploadFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("UploadFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplUploadFile, &form)
+ }
+ } else {
+ // os.ErrNotExist - upload file missing in the intervening time?!
+ log.Error("Error during upload to repo: %-v to filepath: %s on %s from %s: %v", ctx.Repo.Repository, form.TreePath, oldBranchName, form.NewBranchName, err)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.unable_to_upload_files", form.TreePath, err), tplUploadFile, &form)
+ }
+ return
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
+ _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
+}
+
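+// cleanUploadFileName normalizes the given name to a clean relative path and returns ""
+// when any path component is ".git" (case-insensitively), e.g. "a/../.git" becomes "".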
+func cleanUploadFileName(name string) string {
+	// Rebase the filename onto a clean relative path
+	name = util.PathJoinRel(name)
+	// Git disallows any filename from containing a ".git" path component.
+ for _, part := range strings.Split(name, "/") {
+ if strings.ToLower(part) == ".git" {
+ return ""
+ }
+ }
+ return name
+}
+
+// UploadFileToServer upload file to server file dir not git
+func UploadFileToServer(ctx *context.Context) {
+ file, header, err := ctx.Req.FormFile("file")
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err))
+ return
+ }
+ defer file.Close()
+
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(file, buf)
+ if n > 0 {
+ buf = buf[:n]
+ }
+
+ err = upload.Verify(buf, header.Filename, setting.Repository.Upload.AllowedTypes)
+ if err != nil {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+
+ name := cleanUploadFileName(header.Filename)
+ if len(name) == 0 {
+ ctx.Error(http.StatusInternalServerError, "Upload file name is invalid")
+ return
+ }
+
+ upload, err := repo_model.NewUpload(ctx, name, buf, file)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("NewUpload: %v", err))
+ return
+ }
+
+ log.Trace("New file uploaded: %s", upload.UUID)
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": upload.UUID,
+ })
+}
+
+// RemoveUploadFileFromServer remove file from server file dir
+func RemoveUploadFileFromServer(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RemoveUploadFileForm)
+ if len(form.File) == 0 {
+ ctx.Status(http.StatusNoContent)
+ return
+ }
+
+ if err := repo_model.DeleteUploadByUUID(ctx, form.File); err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("DeleteUploadByUUID: %v", err))
+ return
+ }
+
+ log.Trace("Upload file removed: %s", form.File)
+ ctx.Status(http.StatusNoContent)
+}
+
+// GetUniquePatchBranchName gets a unique branch name for a new patch branch.
+// It will be in the form <username>-patch-<num>, where <num> is the lowest number for which
+// no such branch already exists. If we exceed 1000 tries or an error occurs, we just return ""
+// so the user has to type in the branch name themselves (the field will be empty).
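+// For example, for a user named "alice" it tries "alice-patch-1", "alice-patch-2", ... and
+// returns the first name that does not refer to an existing branch.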
+func GetUniquePatchBranchName(ctx *context.Context) string {
+ prefix := ctx.Doer.LowerName + "-patch-"
+ for i := 1; i <= 1000; i++ {
+ branchName := fmt.Sprintf("%s%d", prefix, i)
+ if _, err := ctx.Repo.GitRepo.GetBranch(branchName); err != nil {
+ if git.IsErrBranchNotExist(err) {
+ return branchName
+ }
+ log.Error("GetUniquePatchBranchName: %v", err)
+ return ""
+ }
+ }
+ return ""
+}
+
+// GetClosestParentWithFiles recursively gets the path of the closest parent in a tree that has files (used when a
+// file in a tree is deleted). Returns "" (the root) if no parent other than the root has files. If the given
+// treePath isn't a SubTree or it has no entries, we go up one directory and see if we can return the user to that listing.
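+// For example, if "docs/guide" only contained the deleted file, the lookup walks up to "docs"
+// and finally to the repository root until a tree with entries is found.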
+func GetClosestParentWithFiles(treePath string, commit *git.Commit) string {
+ if len(treePath) == 0 || treePath == "." {
+ return ""
+ }
+ // see if the tree has entries
+ if tree, err := commit.SubTree(treePath); err != nil {
+ // failed to get tree, going up a dir
+ return GetClosestParentWithFiles(path.Dir(treePath), commit)
+ } else if entries, err := tree.ListEntries(); err != nil || len(entries) == 0 {
+ // no files in this dir, going up a dir
+ return GetClosestParentWithFiles(path.Dir(treePath), commit)
+ }
+ return treePath
+}
+
+// getGitIdentity returns the Git identity that should be used for a Git
+// operation, taking into account the user's specified email.
+func getGitIdentity(ctx *context.Context, commitMailID int64, tpl base.TplName, form any) *files_service.IdentityOptions {
+ gitIdentity := &files_service.IdentityOptions{
+ Name: ctx.Doer.Name,
+ }
+
+ // -1 is defined as placeholder email.
+ if commitMailID == -1 {
+ gitIdentity.Email = ctx.Doer.GetPlaceholderEmail()
+ } else {
+ // Check if the given email is activated.
+ email, err := user_model.GetEmailAddressByID(ctx, ctx.Doer.ID, commitMailID)
+ if err != nil {
+ ctx.ServerError("GetEmailAddressByID", err)
+ return nil
+ }
+
+ if email == nil || !email.IsActivated {
+ ctx.Data["Err_CommitMailID"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.invalid_commit_mail"), tpl, form)
+ return nil
+ }
+
+ gitIdentity.Email = email.Email
+ }
+
+ return gitIdentity
+}
diff --git a/routers/web/repo/editor_test.go b/routers/web/repo/editor_test.go
new file mode 100644
index 0000000..4d565b5
--- /dev/null
+++ b/routers/web/repo/editor_test.go
@@ -0,0 +1,73 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCleanUploadName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ kases := map[string]string{
+ ".git/refs/master": "",
+ "/root/abc": "root/abc",
+ "./../../abc": "abc",
+ "a/../.git": "",
+ "a/../../../abc": "abc",
+ "../../../acd": "acd",
+ "../../.git/abc": "",
+ "..\\..\\.git/abc": "..\\..\\.git/abc",
+ "..\\../.git/abc": "",
+ "..\\../.git": "",
+ "abc/../def": "def",
+ ".drone.yml": ".drone.yml",
+ ".abc/def/.drone.yml": ".abc/def/.drone.yml",
+ "..drone.yml.": "..drone.yml.",
+ "..a.dotty...name...": "..a.dotty...name...",
+ "..a.dotty../.folder../.name...": "..a.dotty../.folder../.name...",
+ }
+ for k, v := range kases {
+ assert.EqualValues(t, cleanUploadFileName(k), v)
+ }
+}
+
+func TestGetUniquePatchBranchName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1")
+ ctx.SetParams(":id", "1")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ expectedBranchName := "user2-patch-1"
+ branchName := GetUniquePatchBranchName(ctx)
+ assert.Equal(t, expectedBranchName, branchName)
+}
+
+func TestGetClosestParentWithFiles(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ branch := repo.DefaultBranch
+ gitRepo, _ := gitrepo.OpenRepository(git.DefaultContext, repo)
+ defer gitRepo.Close()
+ commit, _ := gitRepo.GetBranchCommit(branch)
+ var expectedTreePath string // Should return the root dir, empty string, since there are no subdirs in this repo
+ for _, deletedFile := range []string{
+ "dir1/dir2/dir3/file.txt",
+ "file.txt",
+ } {
+ treePath := GetClosestParentWithFiles(deletedFile, commit)
+ assert.Equal(t, expectedTreePath, treePath)
+ }
+}
diff --git a/routers/web/repo/find.go b/routers/web/repo/find.go
new file mode 100644
index 0000000..9da4237
--- /dev/null
+++ b/routers/web/repo/find.go
@@ -0,0 +1,24 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplFindFiles base.TplName = "repo/find/files"
+)
+
+// FindFiles render the page to find repository files
+func FindFiles(ctx *context.Context) {
+ path := ctx.Params("*")
+ ctx.Data["TreeLink"] = ctx.Repo.RepoLink + "/src/" + util.PathEscapeSegments(path)
+ ctx.Data["DataLink"] = ctx.Repo.RepoLink + "/tree-list/" + util.PathEscapeSegments(path)
+ ctx.HTML(http.StatusOK, tplFindFiles)
+}
diff --git a/routers/web/repo/flags/manage.go b/routers/web/repo/flags/manage.go
new file mode 100644
index 0000000..377a5c2
--- /dev/null
+++ b/routers/web/repo/flags/manage.go
@@ -0,0 +1,49 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package flags
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplRepoFlags base.TplName = "repo/flags"
+)
+
+func Manage(ctx *context.Context) {
+ ctx.Data["IsRepoFlagsPage"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.admin.manage_flags")
+
+ flags := map[string]bool{}
+ for _, f := range setting.Repository.SettableFlags {
+ flags[f] = false
+ }
+ repoFlags, _ := ctx.Repo.Repository.ListFlags(ctx)
+ for _, f := range repoFlags {
+ flags[f.Name] = true
+ }
+
+ ctx.Data["Flags"] = flags
+
+ ctx.HTML(http.StatusOK, tplRepoFlags)
+}
+
+func ManagePost(ctx *context.Context) {
+ newFlags := ctx.FormStrings("flags")
+
+ err := ctx.Repo.Repository.ReplaceAllFlags(ctx, newFlags)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.admin.failed_to_replace_flags"))
+ log.Error("Error replacing repository flags for repo %d: %v", ctx.Repo.Repository.ID, err)
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.admin.flags_replaced"))
+ }
+
+ ctx.Redirect(ctx.Repo.Repository.HTMLURL() + "/flags")
+}
diff --git a/routers/web/repo/githttp.go b/routers/web/repo/githttp.go
new file mode 100644
index 0000000..a082498
--- /dev/null
+++ b/routers/web/repo/githttp.go
@@ -0,0 +1,599 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "compress/gzip"
+ gocontext "context"
+ "fmt"
+ "net/http"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/go-chi/cors"
+)
+
+func HTTPGitEnabledHandler(ctx *context.Context) {
+ if setting.Repository.DisableHTTPGit {
+ ctx.Resp.WriteHeader(http.StatusForbidden)
+ _, _ = ctx.Resp.Write([]byte("Interacting with repositories by HTTP protocol is not allowed"))
+ }
+}
+
+func CorsHandler() func(next http.Handler) http.Handler {
+ if setting.Repository.AccessControlAllowOrigin != "" {
+ return cors.Handler(cors.Options{
+ AllowedOrigins: []string{setting.Repository.AccessControlAllowOrigin},
+ AllowedHeaders: []string{"Content-Type", "Authorization", "User-Agent"},
+ })
+ }
+ return func(next http.Handler) http.Handler {
+ return next
+ }
+}
+
+// httpBase implements the Git smart HTTP protocol
+func httpBase(ctx *context.Context) *serviceHandler {
+ username := ctx.Params(":username")
+ reponame := strings.TrimSuffix(ctx.Params(":reponame"), ".git")
+
+ if ctx.FormString("go-get") == "1" {
+ context.EarlyResponseForGoGetMeta(ctx)
+ return nil
+ }
+
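+	// The smart HTTP protocol first issues "GET /info/refs?service=git-upload-pack|git-receive-pack"
+	// and then POSTs to "/git-upload-pack" (fetch/clone) or "/git-receive-pack" (push); if neither the
+	// service parameter nor the URL suffix identifies the service, GET requests are treated as pulls
+	// and everything else as a push.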
+ var isPull, receivePack bool
+ service := ctx.FormString("service")
+ if service == "git-receive-pack" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-receive-pack") {
+ isPull = false
+ receivePack = true
+ } else if service == "git-upload-pack" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-upload-pack") {
+ isPull = true
+ } else if service == "git-upload-archive" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-upload-archive") {
+ isPull = true
+ } else {
+ isPull = ctx.Req.Method == "GET"
+ }
+
+ var accessMode perm.AccessMode
+ if isPull {
+ accessMode = perm.AccessModeRead
+ } else {
+ accessMode = perm.AccessModeWrite
+ }
+
+ isWiki := false
+ unitType := unit.TypeCode
+
+ if strings.HasSuffix(reponame, ".wiki") {
+ isWiki = true
+ unitType = unit.TypeWiki
+ reponame = reponame[:len(reponame)-5]
+ }
+
+ owner := ctx.ContextUser
+ if !owner.IsOrganization() && !owner.IsActive {
+ ctx.PlainText(http.StatusForbidden, "Repository cannot be accessed. You cannot push or open issues/pull-requests.")
+ return nil
+ }
+
+ repoExist := true
+ repo, err := repo_model.GetRepositoryByName(ctx, owner.ID, reponame)
+ if err != nil {
+ if !repo_model.IsErrRepoNotExist(err) {
+ ctx.ServerError("GetRepositoryByName", err)
+ return nil
+ }
+
+ if redirectRepoID, err := repo_model.LookupRedirect(ctx, owner.ID, reponame); err == nil {
+ context.RedirectToRepo(ctx.Base, redirectRepoID)
+ return nil
+ }
+ repoExist = false
+ }
+
+ // Don't allow pushing if the repo is archived
+ if repoExist && repo.IsArchived && !isPull {
+ ctx.PlainText(http.StatusForbidden, "This repo is archived. You can view files and clone it, but cannot push or open issues/pull-requests.")
+ return nil
+ }
+
+	// Only public pulls don't need auth.
+ isPublicPull := repoExist && !repo.IsPrivate && isPull
+ var (
+ askAuth = !isPublicPull || setting.Service.RequireSignInView
+ environ []string
+ )
+
+ // don't allow anonymous pulls if organization is not public
+ if isPublicPull {
+ if err := repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return nil
+ }
+
+ askAuth = askAuth || (repo.Owner.Visibility != structs.VisibleTypePublic)
+ }
+
+ // check access
+ if askAuth {
+ // rely on the results of Contexter
+ if !ctx.IsSigned {
+			// TODO: support digest auth - which would be an Authorization header with a digest
+ ctx.Resp.Header().Set("WWW-Authenticate", `Basic realm="Gitea"`)
+ ctx.Error(http.StatusUnauthorized)
+ return nil
+ }
+
+ context.CheckRepoScopedToken(ctx, repo, auth_model.GetScopeLevelFromAccessMode(accessMode))
+ if ctx.Written() {
+ return nil
+ }
+
+ if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true && ctx.Data["IsActionsToken"] != true {
+ _, err = auth_model.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if err == nil {
+ // TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented
+ ctx.PlainText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. Please create and use a personal access token on the user settings page")
+ return nil
+ } else if !auth_model.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("IsErrTwoFactorNotEnrolled", err)
+ return nil
+ }
+ }
+
+ if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin {
+ ctx.PlainText(http.StatusForbidden, "Your account is disabled.")
+ return nil
+ }
+
+ environ = []string{
+ repo_module.EnvRepoUsername + "=" + username,
+ repo_module.EnvRepoName + "=" + reponame,
+ repo_module.EnvPusherName + "=" + ctx.Doer.Name,
+ repo_module.EnvPusherID + fmt.Sprintf("=%d", ctx.Doer.ID),
+ repo_module.EnvAppURL + "=" + setting.AppURL,
+ }
+
+ if repoExist {
+			// Because of the special ref "refs/for/...", the write permission check needs to be delayed
+ if git.SupportProcReceive {
+ accessMode = perm.AccessModeRead
+ }
+
+ if ctx.Data["IsActionsToken"] == true {
+ taskID := ctx.Data["ActionsTaskID"].(int64)
+ task, err := actions_model.GetTaskByID(ctx, taskID)
+ if err != nil {
+ ctx.ServerError("GetTaskByID", err)
+ return nil
+ }
+ if task.RepoID != repo.ID {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+
+ if task.IsForkPullRequest {
+ if accessMode > perm.AccessModeRead {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+ environ = append(environ, fmt.Sprintf("%s=%d", repo_module.EnvActionPerm, perm.AccessModeRead))
+ } else {
+ if accessMode > perm.AccessModeWrite {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+ environ = append(environ, fmt.Sprintf("%s=%d", repo_module.EnvActionPerm, perm.AccessModeWrite))
+ }
+ } else {
+ p, err := access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+
+ if !p.CanAccess(accessMode, unitType) {
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+ }
+
+ if !isPull && repo.IsMirror {
+ ctx.PlainText(http.StatusForbidden, "mirror repository is read-only")
+ return nil
+ }
+ }
+
+ if !ctx.Doer.KeepEmailPrivate {
+ environ = append(environ, repo_module.EnvPusherEmail+"="+ctx.Doer.Email)
+ }
+
+ if isWiki {
+ environ = append(environ, repo_module.EnvRepoIsWiki+"=true")
+ } else {
+ environ = append(environ, repo_module.EnvRepoIsWiki+"=false")
+ }
+ }
+
+ if !repoExist {
+ if !receivePack {
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+
+ if isWiki { // wiki operations cannot be performed before the repository is created
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+
+ if owner.IsOrganization() && !setting.Repository.EnablePushCreateOrg {
+ ctx.PlainText(http.StatusForbidden, "Push to create is not enabled for organizations.")
+ return nil
+ }
+ if !owner.IsOrganization() && !setting.Repository.EnablePushCreateUser {
+ ctx.PlainText(http.StatusForbidden, "Push to create is not enabled for users.")
+ return nil
+ }
+
+ // Return a dummy ref advertisement if this is a GET (info/refs) request
+ if ctx.Req.Method == http.MethodGet {
+ dummyInfoRefs(ctx)
+ return nil
+ }
+
+ repo, err = repo_service.PushCreateRepo(ctx, ctx.Doer, owner, reponame)
+ if err != nil {
+ log.Error("pushCreateRepo: %v", err)
+ ctx.Status(http.StatusNotFound)
+ return nil
+ }
+ }
+
+ if isWiki {
+ // Ensure the wiki is enabled before we allow access to it
+ if _, err := repo.GetUnit(ctx, unit.TypeWiki); err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+ ctx.PlainText(http.StatusForbidden, "repository wiki is disabled")
+ return nil
+ }
+ log.Error("Failed to get the wiki unit in %-v Error: %v", repo, err)
+ ctx.ServerError("GetUnit(UnitTypeWiki) for "+repo.FullName(), err)
+ return nil
+ }
+ }
+
+ environ = append(environ, repo_module.EnvRepoID+fmt.Sprintf("=%d", repo.ID))
+
+ ctx.Req.URL.Path = strings.ToLower(ctx.Req.URL.Path) // in case the repo name contains upper-case characters
+
+ return &serviceHandler{repo, isWiki, environ}
+}
+
+var (
+ infoRefsCache []byte
+ infoRefsOnce sync.Once
+)
+
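+// dummyInfoRefs serves a cached git-receive-pack ref advertisement generated from an empty bare repository,
+// so that a push-to-create request against a repository that does not exist yet still gets a valid response.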
+func dummyInfoRefs(ctx *context.Context) {
+ infoRefsOnce.Do(func() {
+ tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-info-refs-cache")
+ if err != nil {
+ log.Error("Failed to create temp dir for git-receive-pack cache: %v", err)
+ return
+ }
+
+ defer func() {
+ if err := util.RemoveAll(tmpDir); err != nil {
+ log.Error("RemoveAll: %v", err)
+ }
+ }()
+
+ if err := git.InitRepository(ctx, tmpDir, true, git.Sha1ObjectFormat.Name()); err != nil {
+ log.Error("Failed to init bare repo for git-receive-pack cache: %v", err)
+ return
+ }
+
+ refs, _, err := git.NewCommand(ctx, "receive-pack", "--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Dir: tmpDir})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
+ }
+
+ log.Debug("populating infoRefsCache: \n%s", string(refs))
+ infoRefsCache = refs
+ })
+
+ ctx.RespHeader().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT")
+ ctx.RespHeader().Set("Pragma", "no-cache")
+ ctx.RespHeader().Set("Cache-Control", "no-cache, max-age=0, must-revalidate")
+ ctx.RespHeader().Set("Content-Type", "application/x-git-receive-pack-advertisement")
+ _, _ = ctx.Write(packetWrite("# service=git-receive-pack\n"))
+ _, _ = ctx.Write([]byte("0000"))
+ _, _ = ctx.Write(infoRefsCache)
+}
+
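+// serviceHandler carries the resolved repository, whether the request targets its wiki,
+// and the environment variables passed to the spawned git processes.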
+type serviceHandler struct {
+ repo *repo_model.Repository
+ isWiki bool
+ environ []string
+}
+
+func (h *serviceHandler) getRepoDir() string {
+ if h.isWiki {
+ return h.repo.WikiPath()
+ }
+ return h.repo.RepoPath()
+}
+
+func setHeaderNoCache(ctx *context.Context) {
+ ctx.Resp.Header().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT")
+ ctx.Resp.Header().Set("Pragma", "no-cache")
+ ctx.Resp.Header().Set("Cache-Control", "no-cache, max-age=0, must-revalidate")
+}
+
+func setHeaderCacheForever(ctx *context.Context) {
+ now := time.Now().Unix()
+ expires := now + 31536000
+ ctx.Resp.Header().Set("Date", fmt.Sprintf("%d", now))
+ ctx.Resp.Header().Set("Expires", fmt.Sprintf("%d", expires))
+ ctx.Resp.Header().Set("Cache-Control", "public, max-age=31536000")
+}
+
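+// containsParentDirectorySeparator reports whether v contains a ".." path element, treating both "/" and "\"
+// as separators; sendFile uses it to reject path traversal attempts.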
+func containsParentDirectorySeparator(v string) bool {
+ if !strings.Contains(v, "..") {
+ return false
+ }
+ for _, ent := range strings.FieldsFunc(v, isSlashRune) {
+ if ent == ".." {
+ return true
+ }
+ }
+ return false
+}
+
+func isSlashRune(r rune) bool { return r == '/' || r == '\\' }
+
+func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string) {
+ if containsParentDirectorySeparator(file) {
+ log.Error("request file path contains invalid path: %v", file)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return
+ }
+ reqFile := filepath.Join(h.getRepoDir(), file)
+
+ fi, err := os.Stat(reqFile)
+ if os.IsNotExist(err) {
+ ctx.Resp.WriteHeader(http.StatusNotFound)
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", contentType)
+ ctx.Resp.Header().Set("Content-Length", fmt.Sprintf("%d", fi.Size()))
+ // http.TimeFormat requires a UTC time, refer to https://pkg.go.dev/net/http#TimeFormat
+ ctx.Resp.Header().Set("Last-Modified", fi.ModTime().UTC().Format(http.TimeFormat))
+ http.ServeFile(ctx.Resp, ctx.Req, reqFile)
+}
+
+// safeGitProtocolHeader matches one or more key=value pairs separated by colons, e.g. "version=2"
+var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)
+
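+// prepareGitCmdWithAllowedService only builds commands for the two allowed smart HTTP services,
+// git-receive-pack and git-upload-pack; any other service name is rejected.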
+func prepareGitCmdWithAllowedService(ctx *context.Context, service string) (*git.Command, error) {
+ if service == "receive-pack" {
+ return git.NewCommand(ctx, "receive-pack"), nil
+ }
+ if service == "upload-pack" {
+ return git.NewCommand(ctx, "upload-pack"), nil
+ }
+
+ return nil, fmt.Errorf("service %q is not allowed", service)
+}
+
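+// serviceRPC pipes the (possibly gzip-compressed) request body into the requested git service running
+// against the repository in stateless-RPC mode and streams the result back to the client.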
+func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
+ defer func() {
+ if err := ctx.Req.Body.Close(); err != nil {
+ log.Error("serviceRPC: Close: %v", err)
+ }
+ }()
+
+ expectedContentType := fmt.Sprintf("application/x-git-%s-request", service)
+ if ctx.Req.Header.Get("Content-Type") != expectedContentType {
+ log.Error("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType)
+ ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ return
+ }
+
+ cmd, err := prepareGitCmdWithAllowedService(ctx, service)
+ if err != nil {
+ log.Error("Failed to prepareGitCmdWithService: %v", err)
+ ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service))
+
+ reqBody := ctx.Req.Body
+
+ // Handle GZIP.
+ if ctx.Req.Header.Get("Content-Encoding") == "gzip" {
+ reqBody, err = gzip.NewReader(reqBody)
+ if err != nil {
+ log.Error("Fail to create gzip reader: %v", err)
+ ctx.Resp.WriteHeader(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ // set this to allow the pre-receive and post-receive hooks to execute
+ h.environ = append(h.environ, "SSH_ORIGINAL_COMMAND="+service)
+
+ if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
+ h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
+ }
+
+ var stderr bytes.Buffer
+ cmd.AddArguments("--stateless-rpc").AddDynamicArguments(h.getRepoDir())
+ cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.getRepoDir()))
+ if err := cmd.Run(&git.RunOpts{
+ Dir: h.getRepoDir(),
+ Env: append(os.Environ(), h.environ...),
+ Stdout: ctx.Resp,
+ Stdin: reqBody,
+ Stderr: &stderr,
+ UseContextTimeout: true,
+ }); err != nil {
+ if err.Error() != "signal: killed" {
+ log.Error("Fail to serve RPC(%s) in %s: %v - %s", service, h.getRepoDir(), err, stderr.String())
+ }
+ return
+ }
+}
+
+// ServiceUploadPack implements Git Smart HTTP protocol
+func ServiceUploadPack(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ serviceRPC(ctx, h, "upload-pack")
+ }
+}
+
+// ServiceReceivePack implements Git Smart HTTP protocol
+func ServiceReceivePack(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ serviceRPC(ctx, h, "receive-pack")
+ }
+}
+
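+// getServiceType returns the requested smart HTTP service from the "service" query parameter,
+// with the "git-" prefix stripped (e.g. "git-upload-pack" becomes "upload-pack").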
+func getServiceType(ctx *context.Context) string {
+ serviceType := ctx.Req.FormValue("service")
+ if !strings.HasPrefix(serviceType, "git-") {
+ return ""
+ }
+ return strings.TrimPrefix(serviceType, "git-")
+}
+
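+// updateServerInfo runs "git update-server-info" so that the files served to dumb HTTP clients
+// (info/refs, objects/info/packs) are up to date.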
+func updateServerInfo(ctx gocontext.Context, dir string) []byte {
+ out, _, err := git.NewCommand(ctx, "update-server-info").RunStdBytes(&git.RunOpts{Dir: dir})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(out)))
+ }
+ return out
+}
+
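+// packetWrite encodes str as a git pkt-line: a hexadecimal length prefix (payload length plus the
+// four bytes of the prefix itself, zero-padded to a multiple of four digits) followed by the payload.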
+func packetWrite(str string) []byte {
+ s := strconv.FormatInt(int64(len(str)+4), 16)
+ if len(s)%4 != 0 {
+ s = strings.Repeat("0", 4-len(s)%4) + s
+ }
+ return []byte(s + str)
+}
+
+// GetInfoRefs handles the info/refs endpoint: it serves the Git Smart HTTP ref advertisement when a valid service is requested and falls back to the dumb HTTP info/refs file otherwise
+func GetInfoRefs(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h == nil {
+ return
+ }
+ setHeaderNoCache(ctx)
+ service := getServiceType(ctx)
+ cmd, err := prepareGitCmdWithAllowedService(ctx, service)
+ if err == nil {
+ if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
+ h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
+ }
+ h.environ = append(os.Environ(), h.environ...)
+
+ refs, _, err := cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.getRepoDir()})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
+ }
+
+ ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service))
+ ctx.Resp.WriteHeader(http.StatusOK)
+ _, _ = ctx.Resp.Write(packetWrite("# service=git-" + service + "\n"))
+ _, _ = ctx.Resp.Write([]byte("0000"))
+ _, _ = ctx.Resp.Write(refs)
+ } else {
+ updateServerInfo(ctx, h.getRepoDir())
+ h.sendFile(ctx, "text/plain; charset=utf-8", "info/refs")
+ }
+}
+
+// GetTextFile implements Git dumb HTTP
+func GetTextFile(p string) func(*context.Context) {
+ return func(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderNoCache(ctx)
+ file := ctx.Params("file")
+ if file != "" {
+ h.sendFile(ctx, "text/plain", "objects/info/"+file)
+ } else {
+ h.sendFile(ctx, "text/plain", p)
+ }
+ }
+ }
+}
+
+// GetInfoPacks implements Git dumb HTTP
+func GetInfoPacks(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "text/plain; charset=utf-8", "objects/info/packs")
+ }
+}
+
+// GetLooseObject implements Git dumb HTTP
+func GetLooseObject(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-loose-object", fmt.Sprintf("objects/%s/%s",
+ ctx.Params("head"), ctx.Params("hash")))
+ }
+}
+
+// GetPackFile implements Git dumb HTTP
+func GetPackFile(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-packed-objects", "objects/pack/pack-"+ctx.Params("file")+".pack")
+ }
+}
+
+// GetIdxFile implements Git dumb HTTP
+func GetIdxFile(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-packed-objects-toc", "objects/pack/pack-"+ctx.Params("file")+".idx")
+ }
+}
diff --git a/routers/web/repo/githttp_test.go b/routers/web/repo/githttp_test.go
new file mode 100644
index 0000000..5ba8de3
--- /dev/null
+++ b/routers/web/repo/githttp_test.go
@@ -0,0 +1,42 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestContainsParentDirectorySeparator(t *testing.T) {
+ tests := []struct {
+ v string
+ b bool
+ }{
+ {
+ v: `user2/repo1/info/refs`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/HEAD`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/some.../strange_file...mp3`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/../../custom/conf/app.ini`,
+ b: true,
+ },
+ {
+ v: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`,
+ b: true,
+ },
+ }
+
+ for i := range tests {
+ assert.EqualValues(t, tests[i].b, containsParentDirectorySeparator(tests[i].v))
+ }
+}
diff --git a/routers/web/repo/helper.go b/routers/web/repo/helper.go
new file mode 100644
index 0000000..5e1e116
--- /dev/null
+++ b/routers/web/repo/helper.go
@@ -0,0 +1,44 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/url"
+ "sort"
+
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+)
+
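+// MakeSelfOnTop moves the doer, if present, to the front of users; all other pairs compare as equal,
+// so their relative order is left to sort.Slice.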
+func MakeSelfOnTop(doer *user.User, users []*user.User) []*user.User {
+ if doer != nil {
+ sort.Slice(users, func(i, j int) bool {
+ if users[i].ID == users[j].ID {
+ return false
+ }
+ return users[i].ID == doer.ID // if users[i] is self, put it before others, so less=true
+ })
+ }
+ return users
+}
+
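+// HandleGitError renders a "not found" page with a hint about the missing branch, tag or commit when the
+// underlying git object does not exist, and a server error otherwise.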
+func HandleGitError(ctx *context.Context, msg string, err error) {
+ if git.IsErrNotExist(err) {
+ refType := ""
+ switch {
+ case ctx.Repo.IsViewBranch:
+ refType = "branch"
+ case ctx.Repo.IsViewTag:
+ refType = "tag"
+ case ctx.Repo.IsViewCommit:
+ refType = "commit"
+ }
+ ctx.Data["NotFoundPrompt"] = ctx.Locale.Tr("repo.tree_path_not_found_"+refType, ctx.Repo.TreePath, url.PathEscape(ctx.Repo.RefName))
+ ctx.Data["NotFoundGoBackURL"] = ctx.Repo.RepoLink + "/src/" + refType + "/" + url.PathEscape(ctx.Repo.RefName)
+ ctx.NotFound(msg, err)
+ } else {
+ ctx.ServerError(msg, err)
+ }
+}
diff --git a/routers/web/repo/helper_test.go b/routers/web/repo/helper_test.go
new file mode 100644
index 0000000..978758e
--- /dev/null
+++ b/routers/web/repo/helper_test.go
@@ -0,0 +1,26 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMakeSelfOnTop(t *testing.T) {
+ users := MakeSelfOnTop(nil, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 2, users[0].ID)
+
+ users = MakeSelfOnTop(&user.User{ID: 1}, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 1, users[0].ID)
+
+ users = MakeSelfOnTop(&user.User{ID: 2}, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 2, users[0].ID)
+}
diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go
new file mode 100644
index 0000000..5d13ccc
--- /dev/null
+++ b/routers/web/repo/issue.go
@@ -0,0 +1,3822 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ stdCtx "context"
+ "errors"
+ "fmt"
+ "html/template"
+ "math/big"
+ "net/http"
+ "net/url"
+ "slices"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ pull_model "code.gitea.io/gitea/models/pull"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/git"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ issue_template "code.gitea.io/gitea/modules/issue/template"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/templates/vars"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ issue_service "code.gitea.io/gitea/services/issue"
+ pull_service "code.gitea.io/gitea/services/pull"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "gitea.com/go-chi/binding"
+)
+
+const (
+ tplAttachment base.TplName = "repo/issue/view_content/attachments"
+
+ tplIssues base.TplName = "repo/issue/list"
+ tplIssueNew base.TplName = "repo/issue/new"
+ tplIssueChoose base.TplName = "repo/issue/choose"
+ tplIssueView base.TplName = "repo/issue/view"
+
+ tplReactions base.TplName = "repo/issue/view_content/reactions"
+
+ issueTemplateKey = "IssueTemplate"
+ issueTemplateTitleKey = "IssueTemplateTitle"
+)
+
+// IssueTemplateCandidates lists the paths that are searched for a single-file issue template
+var IssueTemplateCandidates = []string{
+ "ISSUE_TEMPLATE.md",
+ "ISSUE_TEMPLATE.yaml",
+ "ISSUE_TEMPLATE.yml",
+ "issue_template.md",
+ "issue_template.yaml",
+ "issue_template.yml",
+ ".forgejo/ISSUE_TEMPLATE.md",
+ ".forgejo/ISSUE_TEMPLATE.yaml",
+ ".forgejo/ISSUE_TEMPLATE.yml",
+ ".forgejo/issue_template.md",
+ ".forgejo/issue_template.yaml",
+ ".forgejo/issue_template.yml",
+ ".gitea/ISSUE_TEMPLATE.md",
+ ".gitea/ISSUE_TEMPLATE.yaml",
+ ".gitea/ISSUE_TEMPLATE.yml",
+ ".gitea/issue_template.md",
+ ".gitea/issue_template.yaml",
+ ".gitea/issue_template.yml",
+ ".github/ISSUE_TEMPLATE.md",
+ ".github/ISSUE_TEMPLATE.yaml",
+ ".github/ISSUE_TEMPLATE.yml",
+ ".github/issue_template.md",
+ ".github/issue_template.yaml",
+ ".github/issue_template.yml",
+}
+
+// MustAllowUserComment checks whether an issue is locked.
+// If it is locked, only users with write permission to the repository
+// (or admins) may comment; everyone else is blocked.
+func MustAllowUserComment(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin {
+ ctx.Flash.Error(ctx.Tr("repo.issues.comment_on_locked"))
+ ctx.Redirect(issue.Link())
+ return
+ }
+}
+
+// MustEnableIssues checks that the repository has issues enabled, redirecting to the external tracker if one is configured
+func MustEnableIssues(ctx *context.Context) {
+ if !ctx.Repo.CanRead(unit.TypeIssues) &&
+ !ctx.Repo.CanRead(unit.TypeExternalTracker) {
+ ctx.NotFound("MustEnableIssues", nil)
+ return
+ }
+
+ unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil {
+ ctx.Redirect(unit.ExternalTrackerConfig().ExternalTrackerURL)
+ return
+ }
+}
+
+// MustAllowPulls checks that the repository has pull requests enabled and that the user may view them
+func MustAllowPulls(ctx *context.Context) {
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.NotFound("MustAllowPulls", nil)
+ return
+ }
+
+ // The user can send a pull request if they own a fork of the repository.
+ if ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID) {
+ ctx.Repo.PullRequest.Allowed = true
+ ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.Doer.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ }
+}
+
+func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption optional.Option[bool]) {
+ var err error
+ viewType := ctx.FormString("type")
+ sortType := ctx.FormString("sort")
+ types := []string{"all", "your_repositories", "assigned", "created_by", "mentioned", "review_requested", "reviewed_by"}
+ if !util.SliceContainsString(types, viewType, true) {
+ viewType = "all"
+ }
+
+ var (
+ assigneeID = ctx.FormInt64("assignee")
+ posterID = ctx.FormInt64("poster")
+ mentionedID int64
+ reviewRequestedID int64
+ reviewedID int64
+ )
+
+ if ctx.IsSigned {
+ switch viewType {
+ case "created_by":
+ posterID = ctx.Doer.ID
+ case "mentioned":
+ mentionedID = ctx.Doer.ID
+ case "assigned":
+ assigneeID = ctx.Doer.ID
+ case "review_requested":
+ reviewRequestedID = ctx.Doer.ID
+ case "reviewed_by":
+ reviewedID = ctx.Doer.ID
+ }
+ }
+
+ repo := ctx.Repo.Repository
+ var labelIDs []int64
+ // 1,-2 means including label 1 and excluding label 2
+ // 0 means issues with no label
+ // blank means labels will not be filtered for issues
+ selectLabels := ctx.FormString("labels")
+ if selectLabels == "" {
+ ctx.Data["AllLabels"] = true
+ } else if selectLabels == "0" {
+ ctx.Data["NoLabel"] = true
+ }
+ if len(selectLabels) > 0 {
+ labelIDs, err = base.StringsToInt64s(strings.Split(selectLabels, ","))
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("invalid_data", selectLabels), true)
+ }
+ }
+
+ keyword := strings.Trim(ctx.FormString("q"), " ")
+ if bytes.Contains([]byte(keyword), []byte{0x00}) {
+ keyword = ""
+ }
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ var mileIDs []int64
+ if milestoneID > 0 || milestoneID == db.NoConditionID { // -1 to get issues which have no milestone assigned
+ mileIDs = []int64{milestoneID}
+ }
+
+ var issueStats *issues_model.IssueStats
+ statsOpts := &issues_model.IssuesOptions{
+ RepoIDs: []int64{repo.ID},
+ LabelIDs: labelIDs,
+ MilestoneIDs: mileIDs,
+ ProjectID: projectID,
+ AssigneeID: assigneeID,
+ MentionedID: mentionedID,
+ PosterID: posterID,
+ ReviewRequestedID: reviewRequestedID,
+ ReviewedID: reviewedID,
+ IsPull: isPullOption,
+ IssueIDs: nil,
+ }
+ if keyword != "" {
+ allIssueIDs, err := issueIDsFromSearch(ctx, keyword, isFuzzy, statsOpts)
+ if err != nil {
+ if issue_indexer.IsAvailable(ctx) {
+ ctx.ServerError("issueIDsFromSearch", err)
+ return
+ }
+ ctx.Data["IssueIndexerUnavailable"] = true
+ return
+ }
+ statsOpts.IssueIDs = allIssueIDs
+ }
+ if keyword != "" && len(statsOpts.IssueIDs) == 0 {
+ // The keyword search found no issues, so leave issueStats empty.
+ issueStats = &issues_model.IssueStats{}
+ } else {
+ // Either the keyword search found some issues (get issueStats for just those IDs),
+ // or the keyword is empty and issueStats is computed from statsOpts alone.
+ issueStats, err = issues_model.GetIssueStats(ctx, statsOpts)
+ if err != nil {
+ ctx.ServerError("GetIssueStats", err)
+ return
+ }
+ }
+
+ var isShowClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isShowClosed = optional.Some(true)
+ case "all":
+ isShowClosed = optional.None[bool]()
+ default:
+ isShowClosed = optional.Some(false)
+ }
+ // if there are closed issues and no open issues, default to showing all issues
+ if len(ctx.FormString("state")) == 0 && issueStats.OpenCount == 0 && issueStats.ClosedCount != 0 {
+ isShowClosed = optional.None[bool]()
+ }
+
+ if repo.IsTimetrackerEnabled(ctx) {
+ totalTrackedTime, err := issues_model.GetIssueTotalTrackedTime(ctx, statsOpts, isShowClosed)
+ if err != nil {
+ ctx.ServerError("GetIssueTotalTrackedTime", err)
+ return
+ }
+ ctx.Data["TotalTrackedTime"] = totalTrackedTime
+ }
+
+ archived := ctx.FormBool("archived")
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ var total int
+ switch {
+ case isShowClosed.Value():
+ total = int(issueStats.ClosedCount)
+ case !isShowClosed.Has():
+ total = int(issueStats.OpenCount + issueStats.ClosedCount)
+ default:
+ total = int(issueStats.OpenCount)
+ }
+ pager := context.NewPagination(total, setting.UI.IssuePagingNum, page, 5)
+
+ var issues issues_model.IssueList
+ {
+ ids, err := issueIDsFromSearch(ctx, keyword, isFuzzy, &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoIDs: []int64{repo.ID},
+ AssigneeID: assigneeID,
+ PosterID: posterID,
+ MentionedID: mentionedID,
+ ReviewRequestedID: reviewRequestedID,
+ ReviewedID: reviewedID,
+ MilestoneIDs: mileIDs,
+ ProjectID: projectID,
+ IsClosed: isShowClosed,
+ IsPull: isPullOption,
+ LabelIDs: labelIDs,
+ SortType: sortType,
+ })
+ if err != nil {
+ if issue_indexer.IsAvailable(ctx) {
+ ctx.ServerError("issueIDsFromSearch", err)
+ return
+ }
+ ctx.Data["IssueIndexerUnavailable"] = true
+ return
+ }
+ issues, err = issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ ctx.ServerError("GetIssuesByIDs", err)
+ return
+ }
+ }
+
+ approvalCounts, err := issues.GetApprovalCounts(ctx)
+ if err != nil {
+ ctx.ServerError("ApprovalCounts", err)
+ return
+ }
+
+ if ctx.IsSigned {
+ if err := issues.LoadIsRead(ctx, ctx.Doer.ID); err != nil {
+ ctx.ServerError("LoadIsRead", err)
+ return
+ }
+ } else {
+ for i := range issues {
+ issues[i].IsRead = true
+ }
+ }
+
+ commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues)
+ if err != nil {
+ ctx.ServerError("GetIssuesAllCommitStatus", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+
+ if err := issues.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("issues.LoadAttributes", err)
+ return
+ }
+
+ ctx.Data["Issues"] = issues
+ ctx.Data["CommitLastStatus"] = lastStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+
+ // Get assignees.
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return
+ }
+
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+
+ ctx.Data["OrgLabels"] = orgLabels
+ labels = append(labels, orgLabels...)
+ }
+
+ // Get the exclusive scope for every label ID
+ labelExclusiveScopes := make([]string, 0, len(labelIDs))
+ for _, labelID := range labelIDs {
+ foundExclusiveScope := false
+ for _, label := range labels {
+ if label.ID == labelID || label.ID == -labelID {
+ labelExclusiveScopes = append(labelExclusiveScopes, label.ExclusiveScope())
+ foundExclusiveScope = true
+ break
+ }
+ }
+ if !foundExclusiveScope {
+ labelExclusiveScopes = append(labelExclusiveScopes, "")
+ }
+ }
+
+ for _, l := range labels {
+ l.LoadSelectedLabelsAfterClick(labelIDs, labelExclusiveScopes)
+ }
+ ctx.Data["Labels"] = labels
+ ctx.Data["NumLabels"] = len(labels)
+
+ if ctx.FormInt64("assignee") == 0 {
+ assigneeID = 0 // Reset ID to prevent unexpected selection of assignee.
+ }
+
+ ctx.Data["IssueRefEndNames"], ctx.Data["IssueRefURLs"] = issue_service.GetRefEndNamesAndURLs(issues, ctx.Repo.RepoLink)
+
+ ctx.Data["ApprovalCounts"] = func(issueID int64, typ string) int64 {
+ counts, ok := approvalCounts[issueID]
+ if !ok || len(counts) == 0 {
+ return 0
+ }
+ reviewTyp := issues_model.ReviewTypeApprove
+ if typ == "reject" {
+ reviewTyp = issues_model.ReviewTypeReject
+ } else if typ == "waiting" {
+ reviewTyp = issues_model.ReviewTypeRequest
+ }
+ for _, count := range counts {
+ if count.Type == reviewTyp {
+ return count.Count
+ }
+ }
+ return 0
+ }
+
+ retrieveProjects(ctx, repo)
+ if ctx.Written() {
+ return
+ }
+
+ pinned, err := issues_model.GetPinnedIssues(ctx, repo.ID, isPullOption.Value())
+ if err != nil {
+ ctx.ServerError("GetPinnedIssues", err)
+ return
+ }
+
+ ctx.Data["PinnedIssues"] = pinned
+ ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.Doer.IsAdmin)
+ ctx.Data["IssueStats"] = issueStats
+ ctx.Data["OpenCount"] = issueStats.OpenCount
+ ctx.Data["ClosedCount"] = issueStats.ClosedCount
+ linkStr := "%s?q=%s&type=%s&sort=%s&state=%s&labels=%s&milestone=%d&project=%d&assignee=%d&poster=%d&archived=%t"
+ ctx.Data["AllStatesLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "all", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["OpenLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "open", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["ClosedLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "closed", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["SelLabelIDs"] = labelIDs
+ ctx.Data["SelectLabels"] = selectLabels
+ ctx.Data["ViewType"] = viewType
+ ctx.Data["SortType"] = sortType
+ ctx.Data["MilestoneID"] = milestoneID
+ ctx.Data["ProjectID"] = projectID
+ ctx.Data["AssigneeID"] = assigneeID
+ ctx.Data["PosterID"] = posterID
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["IsShowClosed"] = isShowClosed
+ switch {
+ case isShowClosed.Value():
+ ctx.Data["State"] = "closed"
+ case !isShowClosed.Has():
+ ctx.Data["State"] = "all"
+ default:
+ ctx.Data["State"] = "open"
+ }
+ ctx.Data["ShowArchivedLabels"] = archived
+
+ pager.AddParam(ctx, "q", "Keyword")
+ pager.AddParam(ctx, "type", "ViewType")
+ pager.AddParam(ctx, "sort", "SortType")
+ pager.AddParam(ctx, "state", "State")
+ pager.AddParam(ctx, "labels", "SelectLabels")
+ pager.AddParam(ctx, "milestone", "MilestoneID")
+ pager.AddParam(ctx, "project", "ProjectID")
+ pager.AddParam(ctx, "assignee", "AssigneeID")
+ pager.AddParam(ctx, "poster", "PosterID")
+ pager.AddParam(ctx, "archived", "ShowArchivedLabels")
+ pager.AddParam(ctx, "fuzzy", "IsFuzzy")
+
+ ctx.Data["Page"] = pager
+}
+
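+// issueIDsFromSearch queries the issue indexer with the given keyword and options and returns the matching issue IDs.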
+func issueIDsFromSearch(ctx *context.Context, keyword string, fuzzy bool, opts *issues_model.IssuesOptions) ([]int64, error) {
+ ids, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts).Copy(
+ func(o *issue_indexer.SearchOptions) {
+ o.IsFuzzyKeyword = fuzzy
+ },
+ ))
+ if err != nil {
+ return nil, fmt.Errorf("SearchIssues: %w", err)
+ }
+ return ids, nil
+}
+
+// Issues render issues page
+func Issues(ctx *context.Context) {
+ isPullList := ctx.Params(":type") == "pulls"
+ if isPullList {
+ MustAllowPulls(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("repo.pulls")
+ ctx.Data["PageIsPullList"] = true
+ } else {
+ MustEnableIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("repo.issues")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ }
+
+ issues(ctx, ctx.FormInt64("milestone"), ctx.FormInt64("project"), optional.Some(isPullList))
+ if ctx.Written() {
+ return
+ }
+
+ renderMilestones(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.Data["CanWriteIssuesOrPulls"] = ctx.Repo.CanWriteIssuesOrPulls(isPullList)
+
+ ctx.HTML(http.StatusOK, tplIssues)
+}
+
+func renderMilestones(ctx *context.Context) {
+ // Get milestones
+ milestones, err := db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetAllRepoMilestones", err)
+ return
+ }
+
+ openMilestones, closedMilestones := issues_model.MilestoneList{}, issues_model.MilestoneList{}
+ for _, milestone := range milestones {
+ if milestone.IsClosed {
+ closedMilestones = append(closedMilestones, milestone)
+ } else {
+ openMilestones = append(openMilestones, milestone)
+ }
+ }
+ ctx.Data["OpenMilestones"] = openMilestones
+ ctx.Data["ClosedMilestones"] = closedMilestones
+}
+
+// RetrieveRepoMilestonesAndAssignees finds all the milestones and assignees of a repository
+func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *repo_model.Repository) {
+ var err error
+ ctx.Data["OpenMilestones"], err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+ ctx.Data["ClosedMilestones"], err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+}
+
+func retrieveProjects(ctx *context.Context, repo *repo_model.Repository) {
+ // Distinguish whether the owner of the repository
+ // is an individual or an organization
+ repoOwnerType := project_model.TypeIndividual
+ if repo.Owner.IsOrganization() {
+ repoOwnerType = project_model.TypeOrganization
+ }
+ var err error
+ projects, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ Type: project_model.TypeRepository,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+ projects2, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: repo.OwnerID,
+ IsClosed: optional.Some(false),
+ Type: repoOwnerType,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ ctx.Data["OpenProjects"] = append(projects, projects2...)
+
+ projects, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ Type: project_model.TypeRepository,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+ projects2, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: repo.OwnerID,
+ IsClosed: optional.Some(true),
+ Type: repoOwnerType,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ ctx.Data["ClosedProjects"] = append(projects, projects2...)
+}
+
+// repoReviewerSelection items to be shown
+type repoReviewerSelection struct {
+ IsTeam bool
+ Team *organization.Team
+ User *user_model.User
+ Review *issues_model.Review
+ CanChange bool
+ Checked bool
+ ItemID int64
+}
+
+// RetrieveRepoReviewers finds all reviewers of a repository
+func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, issue *issues_model.Issue, canChooseReviewer bool) {
+ ctx.Data["CanChooseReviewer"] = canChooseReviewer
+
+ originalAuthorReviews, err := issues_model.GetReviewersFromOriginalAuthorsByIssueID(ctx, issue.ID)
+ if err != nil {
+ ctx.ServerError("GetReviewersFromOriginalAuthorsByIssueID", err)
+ return
+ }
+ ctx.Data["OriginalReviews"] = originalAuthorReviews
+
+ reviews, err := issues_model.GetReviewsByIssueID(ctx, issue.ID)
+ if err != nil {
+ ctx.ServerError("GetReviewersByIssueID", err)
+ return
+ }
+
+ if len(reviews) == 0 && !canChooseReviewer {
+ return
+ }
+
+ var (
+ pullReviews []*repoReviewerSelection
+ reviewersResult []*repoReviewerSelection
+ teamReviewersResult []*repoReviewerSelection
+ teamReviewers []*organization.Team
+ reviewers []*user_model.User
+ )
+
+ if canChooseReviewer {
+ posterID := issue.PosterID
+ if issue.OriginalAuthorID > 0 {
+ posterID = 0
+ }
+
+ reviewers, err = repo_model.GetReviewers(ctx, repo, ctx.Doer.ID, posterID)
+ if err != nil {
+ ctx.ServerError("GetReviewers", err)
+ return
+ }
+
+ teamReviewers, err = repo_service.GetReviewerTeams(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetReviewerTeams", err)
+ return
+ }
+
+ if len(reviewers) > 0 {
+ reviewersResult = make([]*repoReviewerSelection, 0, len(reviewers))
+ }
+
+ if len(teamReviewers) > 0 {
+ teamReviewersResult = make([]*repoReviewerSelection, 0, len(teamReviewers))
+ }
+ }
+
+ pullReviews = make([]*repoReviewerSelection, 0, len(reviews))
+
+ for _, review := range reviews {
+ tmp := &repoReviewerSelection{
+ Checked: review.Type == issues_model.ReviewTypeRequest,
+ Review: review,
+ ItemID: review.ReviewerID,
+ }
+ if review.ReviewerTeamID > 0 {
+ tmp.IsTeam = true
+ tmp.ItemID = -review.ReviewerTeamID
+ }
+
+ if canChooseReviewer {
+ // Users who can choose reviewers can also remove review requests
+ tmp.CanChange = true
+ } else if ctx.Doer != nil && ctx.Doer.ID == review.ReviewerID && review.Type == issues_model.ReviewTypeRequest {
+ // A user can refuse review requests
+ tmp.CanChange = true
+ }
+
+ pullReviews = append(pullReviews, tmp)
+
+ if canChooseReviewer {
+ if tmp.IsTeam {
+ teamReviewersResult = append(teamReviewersResult, tmp)
+ } else {
+ reviewersResult = append(reviewersResult, tmp)
+ }
+ }
+ }
+
+ if len(pullReviews) > 0 {
+ // Drop all non-existing users and teams from the reviews
+ currentPullReviewers := make([]*repoReviewerSelection, 0, len(pullReviews))
+ for _, item := range pullReviews {
+ if item.Review.ReviewerID > 0 {
+ if err = item.Review.LoadReviewer(ctx); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ continue
+ }
+ ctx.ServerError("LoadReviewer", err)
+ return
+ }
+ item.User = item.Review.Reviewer
+ } else if item.Review.ReviewerTeamID > 0 {
+ if err = item.Review.LoadReviewerTeam(ctx); err != nil {
+ if organization.IsErrTeamNotExist(err) {
+ continue
+ }
+ ctx.ServerError("LoadReviewerTeam", err)
+ return
+ }
+ item.Team = item.Review.ReviewerTeam
+ } else {
+ continue
+ }
+
+ currentPullReviewers = append(currentPullReviewers, item)
+ }
+ ctx.Data["PullReviewers"] = currentPullReviewers
+ }
+
+ if canChooseReviewer && reviewersResult != nil {
+ preadded := len(reviewersResult)
+ for _, reviewer := range reviewers {
+ found := false
+ reviewAddLoop:
+ for _, tmp := range reviewersResult[:preadded] {
+ if tmp.ItemID == reviewer.ID {
+ tmp.User = reviewer
+ found = true
+ break reviewAddLoop
+ }
+ }
+
+ if found {
+ continue
+ }
+
+ reviewersResult = append(reviewersResult, &repoReviewerSelection{
+ IsTeam: false,
+ CanChange: true,
+ User: reviewer,
+ ItemID: reviewer.ID,
+ })
+ }
+
+ ctx.Data["Reviewers"] = reviewersResult
+ }
+
+ if canChooseReviewer && teamReviewersResult != nil {
+ preadded := len(teamReviewersResult)
+ for _, team := range teamReviewers {
+ found := false
+ teamReviewAddLoop:
+ for _, tmp := range teamReviewersResult[:preadded] {
+ if tmp.ItemID == -team.ID {
+ tmp.Team = team
+ found = true
+ break teamReviewAddLoop
+ }
+ }
+
+ if found {
+ continue
+ }
+
+ teamReviewersResult = append(teamReviewersResult, &repoReviewerSelection{
+ IsTeam: true,
+ CanChange: true,
+ Team: team,
+ ItemID: -team.ID,
+ })
+ }
+
+ ctx.Data["TeamReviewers"] = teamReviewersResult
+ }
+}
+
+// RetrieveRepoMetas finds all the meta information of a repository
+func RetrieveRepoMetas(ctx *context.Context, repo *repo_model.Repository, isPull bool) []*issues_model.Label {
+ if !ctx.Repo.CanWriteIssuesOrPulls(isPull) {
+ return nil
+ }
+
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return nil
+ }
+ ctx.Data["Labels"] = labels
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ return nil
+ }
+
+ ctx.Data["OrgLabels"] = orgLabels
+ labels = append(labels, orgLabels...)
+ }
+
+ RetrieveRepoMilestonesAndAssignees(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+
+ retrieveProjects(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return nil
+ }
+
+ // Contains true if the user can create issue dependencies
+ ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, isPull)
+
+ return labels
+}
+
+// Tries to load and set an issue template. The first return value indicates if a template was loaded.
+func setTemplateIfExists(ctx *context.Context, ctxDataKey string, possibleFiles []string) (bool, map[string]error) {
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ return false, nil
+ }
+
+ templateCandidates := make([]string, 0, 1+len(possibleFiles))
+ if t := ctx.FormString("template"); t != "" {
+ templateCandidates = append(templateCandidates, t)
+ }
+ templateCandidates = append(templateCandidates, possibleFiles...) // Append files to the end because they should be fallbacks
+
+ templateErrs := map[string]error{}
+ for _, filename := range templateCandidates {
+ if ok, _ := commit.HasFile(filename); !ok {
+ continue
+ }
+ template, err := issue_template.UnmarshalFromCommit(commit, filename)
+ if err != nil {
+ templateErrs[filename] = err
+ continue
+ }
+ ctx.Data[issueTemplateTitleKey] = template.Title
+ ctx.Data[ctxDataKey] = template.Content
+
+ if template.Type() == api.IssueTemplateTypeYaml {
+ // Replace field default values by values from query
+ for _, field := range template.Fields {
+ fieldValue := ctx.FormString("field:" + field.ID)
+ if fieldValue != "" {
+ field.Attributes["value"] = fieldValue
+ }
+ }
+
+ ctx.Data["Fields"] = template.Fields
+ ctx.Data["TemplateFile"] = template.FileName
+ }
+ labelIDs := make([]string, 0, len(template.Labels))
+ if repoLabels, err := issues_model.GetLabelsByRepoID(ctx, ctx.Repo.Repository.ID, "", db.ListOptions{}); err == nil {
+ ctx.Data["Labels"] = repoLabels
+ if ctx.Repo.Owner.IsOrganization() {
+ if orgLabels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{}); err == nil {
+ ctx.Data["OrgLabels"] = orgLabels
+ repoLabels = append(repoLabels, orgLabels...)
+ }
+ }
+
+ for _, metaLabel := range template.Labels {
+ for _, repoLabel := range repoLabels {
+ if strings.EqualFold(repoLabel.Name, metaLabel) {
+ repoLabel.IsChecked = true
+ labelIDs = append(labelIDs, strconv.FormatInt(repoLabel.ID, 10))
+ break
+ }
+ }
+ }
+ }
+
+ if template.Ref != "" && !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
+ template.Ref = git.BranchPrefix + template.Ref
+ }
+ ctx.Data["HasSelectedLabel"] = len(labelIDs) > 0
+ ctx.Data["label_ids"] = strings.Join(labelIDs, ",")
+ ctx.Data["Reference"] = template.Ref
+ ctx.Data["RefEndName"] = git.RefName(template.Ref).ShortName()
+ return true, templateErrs
+ }
+ return false, templateErrs
+}
+
+// NewIssue render creating issue page
+func NewIssue(ctx *context.Context) {
+ issueConfig, _ := issue_service.GetTemplateConfigFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ hasTemplates := issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = hasTemplates
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ title := ctx.FormString("title")
+ ctx.Data["TitleQuery"] = title
+ body := ctx.FormString("body")
+ ctx.Data["BodyQuery"] = body
+
+ isProjectsEnabled := ctx.Repo.CanRead(unit.TypeProjects)
+ ctx.Data["IsProjectsEnabled"] = isProjectsEnabled
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ milestoneID := ctx.FormInt64("milestone")
+ if milestoneID > 0 {
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ log.Error("GetMilestoneByID: %d: %v", milestoneID, err)
+ } else {
+ ctx.Data["milestone_id"] = milestoneID
+ ctx.Data["Milestone"] = milestone
+ }
+ }
+
+ projectID := ctx.FormInt64("project")
+ if projectID > 0 && isProjectsEnabled {
+ project, err := project_model.GetProjectByID(ctx, projectID)
+ if err != nil {
+ log.Error("GetProjectByID: %d: %v", projectID, err)
+ } else if project.RepoID != ctx.Repo.Repository.ID {
+ log.Error("GetProjectByID: %d: %v", projectID, fmt.Errorf("project[%d] not in repo [%d]", project.ID, ctx.Repo.Repository.ID))
+ } else {
+ ctx.Data["project_id"] = projectID
+ ctx.Data["Project"] = project
+ }
+
+ if len(ctx.Req.URL.Query().Get("project")) > 0 {
+ ctx.Data["redirect_after_creation"] = "project"
+ }
+ }
+
+ RetrieveRepoMetas(ctx, ctx.Repo.Repository, false)
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ _, templateErrs := issue_service.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ templateLoaded, errs := setTemplateIfExists(ctx, issueTemplateKey, IssueTemplateCandidates)
+ for k, v := range errs {
+ templateErrs[k] = v
+ }
+ if ctx.Written() {
+ return
+ }
+
+ if len(templateErrs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, templateErrs), true)
+ }
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypeIssues)
+
+ if !issueConfig.BlankIssuesEnabled && hasTemplates && !templateLoaded {
+ // The "issues/new" and "issues/new/choose" share the same query parameters "project" and "milestone", if blank issues are disabled, just redirect to the "issues/choose" page with these parameters.
+ ctx.Redirect(fmt.Sprintf("%s/issues/new/choose?%s", ctx.Repo.Repository.Link(), ctx.Req.URL.RawQuery), http.StatusSeeOther)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplIssueNew)
+}
+
+func renderErrorOfTemplates(ctx *context.Context, errs map[string]error) template.HTML {
+ var files []string
+ for k := range errs {
+ files = append(files, k)
+ }
+ sort.Strings(files) // keep the output stable
+
+ var lines []string
+ for _, file := range files {
+ lines = append(lines, fmt.Sprintf("%s: %v", file, errs[file]))
+ }
+
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.issues.choose.ignore_invalid_templates"),
+ "Summary": ctx.Tr("repo.issues.choose.invalid_templates", len(errs)),
+ "Details": utils.SanitizeFlashErrorString(strings.Join(lines, "\n")),
+ })
+ if err != nil {
+ log.Debug("render flash error: %v", err)
+ flashError = ctx.Locale.Tr("repo.issues.choose.ignore_invalid_templates")
+ }
+ return flashError
+}
+
+// NewIssueChooseTemplate render creating issue from template page
+func NewIssueChooseTemplate(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+
+ issueTemplates, errs := issue_service.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["IssueTemplates"] = issueTemplates
+
+ if len(errs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, errs), true)
+ }
+
+ if !issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo) {
+ // The "issues/new" and "issues/new/choose" share the same query parameters "project" and "milestone", if no template here, just redirect to the "issues/new" page with these parameters.
+ ctx.Redirect(fmt.Sprintf("%s/issues/new?%s", ctx.Repo.Repository.Link(), ctx.Req.URL.RawQuery), http.StatusSeeOther)
+ return
+ }
+
+ issueConfig, err := issue_service.GetTemplateConfigFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["IssueConfig"] = issueConfig
+ ctx.Data["IssueConfigError"] = err // ctx.Flash.Err makes problems here
+
+ ctx.Data["milestone"] = ctx.FormInt64("milestone")
+ ctx.Data["project"] = ctx.FormInt64("project")
+
+ ctx.HTML(http.StatusOK, tplIssueChoose)
+}
+
+// DeleteIssue deletes an issue
+func DeleteIssue(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := issue_service.DeleteIssue(ctx, ctx.Doer, ctx.Repo.GitRepo, issue); err != nil {
+ ctx.ServerError("DeleteIssueByID", err)
+ return
+ }
+
+ if issue.IsPull {
+ ctx.Redirect(fmt.Sprintf("%s/pulls", ctx.Repo.Repository.Link()), http.StatusSeeOther)
+ return
+ }
+
+ ctx.Redirect(fmt.Sprintf("%s/issues", ctx.Repo.Repository.Link()), http.StatusSeeOther)
+}
+
+// ValidateRepoMetas checks the form and returns the repository's label IDs, assignee IDs, milestone ID and project ID
+func ValidateRepoMetas(ctx *context.Context, form forms.CreateIssueForm, isPull bool) ([]int64, []int64, int64, int64) {
+ var (
+ repo = ctx.Repo.Repository
+ err error
+ )
+
+ labels := RetrieveRepoMetas(ctx, ctx.Repo.Repository, isPull)
+ if ctx.Written() {
+ return nil, nil, 0, 0
+ }
+
+ var labelIDs []int64
+ hasSelected := false
+ // Check labels.
+ if len(form.LabelIDs) > 0 {
+ labelIDs, err = base.StringsToInt64s(strings.Split(form.LabelIDs, ","))
+ if err != nil {
+ return nil, nil, 0, 0
+ }
+ labelIDMark := make(container.Set[int64])
+ labelIDMark.AddMultiple(labelIDs...)
+
+ for i := range labels {
+ if labelIDMark.Contains(labels[i].ID) {
+ labels[i].IsChecked = true
+ hasSelected = true
+ }
+ }
+ }
+
+ ctx.Data["Labels"] = labels
+ ctx.Data["HasSelectedLabel"] = hasSelected
+ ctx.Data["label_ids"] = form.LabelIDs
+
+ // Check milestone.
+ milestoneID := form.MilestoneID
+ if milestoneID > 0 {
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ ctx.ServerError("GetMilestoneByID", err)
+ return nil, nil, 0, 0
+ }
+ if milestone.RepoID != repo.ID {
+ ctx.ServerError("GetMilestoneByID", err)
+ return nil, nil, 0, 0
+ }
+ ctx.Data["Milestone"] = milestone
+ ctx.Data["milestone_id"] = milestoneID
+ }
+
+ if form.ProjectID > 0 {
+ p, err := project_model.GetProjectByID(ctx, form.ProjectID)
+ if err != nil {
+ ctx.ServerError("GetProjectByID", err)
+ return nil, nil, 0, 0
+ }
+ if p.RepoID != ctx.Repo.Repository.ID && p.OwnerID != ctx.Repo.Repository.OwnerID {
+ ctx.NotFound("", nil)
+ return nil, nil, 0, 0
+ }
+
+ ctx.Data["Project"] = p
+ ctx.Data["project_id"] = form.ProjectID
+ }
+
+ // Check assignees
+ var assigneeIDs []int64
+ if len(form.AssigneeIDs) > 0 {
+ assigneeIDs, err = base.StringsToInt64s(strings.Split(form.AssigneeIDs, ","))
+ if err != nil {
+ return nil, nil, 0, 0
+ }
+
+ // Check that the passed assignees actually exist and are assignable
+ for _, aID := range assigneeIDs {
+ assignee, err := user_model.GetUserByID(ctx, aID)
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return nil, nil, 0, 0
+ }
+
+ valid, err := access_model.CanBeAssigned(ctx, assignee, repo, isPull)
+ if err != nil {
+ ctx.ServerError("CanBeAssigned", err)
+ return nil, nil, 0, 0
+ }
+
+ if !valid {
+ ctx.ServerError("canBeAssigned", repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: aID, RepoName: repo.Name})
+ return nil, nil, 0, 0
+ }
+ }
+ }
+
+ // Keep the old single assignee ID for compatibility reasons
+ if form.AssigneeID > 0 {
+ assigneeIDs = append(assigneeIDs, form.AssigneeID)
+ }
+
+ return labelIDs, assigneeIDs, milestoneID, form.ProjectID
+}
+
+// NewIssuePost response for creating new issue
+func NewIssuePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateIssueForm)
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ var (
+ repo = ctx.Repo.Repository
+ attachments []string
+ )
+
+ labelIDs, assigneeIDs, milestoneID, projectID := ValidateRepoMetas(ctx, *form, false)
+ if ctx.Written() {
+ return
+ }
+
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
+ return
+ }
+
+ content := form.Content
+ if filename := ctx.Req.Form.Get("template-file"); filename != "" {
+ if template, err := issue_template.UnmarshalFromRepo(ctx.Repo.GitRepo, ctx.Repo.Repository.DefaultBranch, filename); err == nil {
+ content = issue_template.RenderToMarkdown(template, ctx.Req.Form)
+ }
+ }
+
+ issue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: form.Title,
+ PosterID: ctx.Doer.ID,
+ Poster: ctx.Doer,
+ MilestoneID: milestoneID,
+ Content: content,
+ Ref: form.Ref,
+ }
+
+ if err := issue_service.NewIssue(ctx, repo, issue, labelIDs, attachments, assigneeIDs); err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.JSONError(ctx.Tr("repo.issues.blocked_by_user"))
+ return
+ } else if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) {
+ ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error())
+ return
+ }
+ ctx.ServerError("NewIssue", err)
+ return
+ }
+
+ if projectID > 0 {
+ if !ctx.Repo.CanRead(unit.TypeProjects) {
+ // User must also be able to see the project.
+ ctx.Error(http.StatusBadRequest, "user hasn't permissions to read projects")
+ return
+ }
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectID, 0); err != nil {
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+
+ log.Trace("Issue created: %d/%d", repo.ID, issue.ID)
+ if ctx.FormString("redirect_after_creation") == "project" && projectID > 0 {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects/" + strconv.FormatInt(projectID, 10))
+ } else {
+ ctx.JSONRedirect(issue.Link())
+ }
+}
+
+// roleDescriptor returns the role descriptor for a comment made by the given poster on the given issue in the given repo
+func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *issues_model.Issue, hasOriginalAuthor bool) (issues_model.RoleDescriptor, error) {
+ roleDescriptor := issues_model.RoleDescriptor{}
+
+ if hasOriginalAuthor {
+ return roleDescriptor, nil
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, poster)
+ if err != nil {
+ return roleDescriptor, err
+ }
+
+ // If the poster is the actual poster of the issue, enable Poster role.
+ roleDescriptor.IsPoster = issue.IsPoster(poster.ID)
+
+ // Check if the poster is owner of the repo.
+ if perm.IsOwner() {
+ // If the poster isn't an admin, enable the owner role.
+ if !poster.IsAdmin {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner
+ return roleDescriptor, nil
+ }
+
+ // Otherwise check if poster is the real repo admin.
+ ok, err := access_model.IsUserRealRepoAdmin(ctx, repo, poster)
+ if err != nil {
+ return roleDescriptor, err
+ }
+ if ok {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner
+ return roleDescriptor, nil
+ }
+ }
+
+ // If repo is organization, check Member role
+ if err := repo.LoadOwner(ctx); err != nil {
+ return roleDescriptor, err
+ }
+ if repo.Owner.IsOrganization() {
+ if isMember, err := organization.IsOrganizationMember(ctx, repo.Owner.ID, poster.ID); err != nil {
+ return roleDescriptor, err
+ } else if isMember {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoMember
+ return roleDescriptor, nil
+ }
+ }
+
+ // If the poster is a collaborator of the repo
+ if isCollaborator, err := repo_model.IsCollaborator(ctx, repo.ID, poster.ID); err != nil {
+ return roleDescriptor, err
+ } else if isCollaborator {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoCollaborator
+ return roleDescriptor, nil
+ }
+
+ hasMergedPR, err := issues_model.HasMergedPullRequestInRepo(ctx, repo.ID, poster.ID)
+ if err != nil {
+ return roleDescriptor, err
+ } else if hasMergedPR {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoContributor
+ } else if issue.IsPull {
+ // only show the first-time contributor role on the poster's first open pull request
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoFirstTimeContributor
+ }
+
+ return roleDescriptor, nil
+}
+
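+// getBranchData exposes the base/head branch names and the head repository owner of a pull request to the
+// template; for plain issues these values stay nil.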
+func getBranchData(ctx *context.Context, issue *issues_model.Issue) {
+ ctx.Data["BaseBranch"] = nil
+ ctx.Data["HeadBranch"] = nil
+ ctx.Data["HeadUserName"] = nil
+ ctx.Data["BaseName"] = ctx.Repo.Repository.OwnerName
+ if issue.IsPull {
+ pull := issue.PullRequest
+ ctx.Data["BaseBranch"] = pull.BaseBranch
+ ctx.Data["HeadBranch"] = pull.HeadBranch
+ ctx.Data["HeadUserName"] = pull.MustHeadUserName(ctx)
+ }
+}
+
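+// prepareHiddenCommentType exposes a "ShouldShowCommentType" helper to the template based on the user's
+// hidden-comment-types setting, which is stored as a big-integer bitset where bit N hides comment type N.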
+func prepareHiddenCommentType(ctx *context.Context) {
+ var hiddenCommentTypes *big.Int
+ if ctx.IsSigned {
+ val, err := user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes)
+ if err != nil {
+ ctx.ServerError("GetUserSetting", err)
+ return
+ }
+ hiddenCommentTypes, _ = new(big.Int).SetString(val, 10) // we can safely ignore the failed conversion here
+ }
+
+ ctx.Data["ShouldShowCommentType"] = func(commentType issues_model.CommentType) bool {
+ return hiddenCommentTypes == nil || hiddenCommentTypes.Bit(int(commentType)) == 0
+ }
+}
+
+// ViewIssue renders the issue view page
+func ViewIssue(ctx *context.Context) {
+ if ctx.Params(":type") == "issues" {
+ // If an issue was requested, check whether the repo uses an external tracker and redirect there
+ extIssueUnit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil && extIssueUnit != nil {
+ if extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == markup.IssueNameStyleNumeric || extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == "" {
+ metas := ctx.Repo.Repository.ComposeMetas(ctx)
+ metas["index"] = ctx.Params(":index")
+ res, err := vars.Expand(extIssueUnit.ExternalTrackerConfig().ExternalTrackerFormat, metas)
+ if err != nil {
+ log.Error("unable to expand template vars for issue url. issue: %s, err: %v", metas["index"], err)
+ ctx.ServerError("Expand", err)
+ return
+ }
+ ctx.Redirect(res)
+ return
+ }
+ } else if err != nil && !repo_model.IsErrUnitTypeNotExist(err) {
+ ctx.ServerError("GetUnit", err)
+ return
+ }
+ }
+
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.ServerError("GetIssueByIndex", err)
+ }
+ return
+ }
+ if issue.Repo == nil {
+ issue.Repo = ctx.Repo.Repository
+ }
+
+ // Make sure the type and URL match.
+ if ctx.Params(":type") == "issues" && issue.IsPull {
+ ctx.Redirect(issue.Link())
+ return
+ } else if ctx.Params(":type") == "pulls" && !issue.IsPull {
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ if issue.IsPull {
+ MustAllowPulls(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullConversation"] = true
+ } else {
+ MustEnableIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ }
+
+ if issue.IsPull && !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.Data["IssueType"] = "pulls"
+ } else if !issue.IsPull && !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.Data["IssueType"] = "issues"
+ } else {
+ ctx.Data["IssueType"] = "all"
+ }
+
+ ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanRead(unit.TypeProjects)
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ if err = filterXRefComments(ctx, issue); err != nil {
+ ctx.ServerError("filterXRefComments", err)
+ return
+ }
+
+ ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, emoji.ReplaceAliases(issue.Title))
+
+ iw := new(issues_model.IssueWatch)
+ if ctx.Doer != nil {
+ iw.UserID = ctx.Doer.ID
+ iw.IssueID = issue.ID
+ iw.IsWatching, err = issues_model.CheckIssueWatch(ctx, ctx.Doer, issue)
+ if err != nil {
+ ctx.ServerError("CheckIssueWatch", err)
+ return
+ }
+ }
+ ctx.Data["IssueWatch"] = iw
+ issue.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, issue.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ repo := ctx.Repo.Repository
+
+ // Get more information if it's a pull request.
+ if issue.IsPull {
+ if issue.PullRequest.HasMerged {
+ ctx.Data["DisableStatusChange"] = issue.PullRequest.HasMerged
+ PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ PrepareViewPullInfo(ctx, issue)
+ ctx.Data["DisableStatusChange"] = ctx.Data["IsPullRequestBroken"] == true && issue.IsClosed
+ }
+ if ctx.Written() {
+ return
+ }
+ }
+
+ // Metas.
+ // Check labels.
+ labelIDMark := make(container.Set[int64])
+ for _, label := range issue.Labels {
+ labelIDMark.Add(label.ID)
+ }
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return
+ }
+ ctx.Data["Labels"] = labels
+
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+ ctx.Data["OrgLabels"] = orgLabels
+
+ labels = append(labels, orgLabels...)
+ }
+
+ hasSelected := false
+ for i := range labels {
+ if labelIDMark.Contains(labels[i].ID) {
+ labels[i].IsChecked = true
+ hasSelected = true
+ }
+ }
+ ctx.Data["HasSelectedLabel"] = hasSelected
+
+ // Check milestone and assignee.
+ if ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ RetrieveRepoMilestonesAndAssignees(ctx, repo)
+ retrieveProjects(ctx, repo)
+
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if issue.IsPull {
+ canChooseReviewer := false
+ if ctx.Doer != nil && ctx.IsSigned {
+ canChooseReviewer = issue_service.CanDoerChangeReviewRequests(ctx, ctx.Doer, repo, issue)
+ }
+
+ RetrieveRepoReviewers(ctx, repo, issue, canChooseReviewer)
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if ctx.IsSigned {
+ // Update issue-user.
+ if err = activities_model.SetIssueReadBy(ctx, issue.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return
+ }
+ }
+
+ var (
+ role issues_model.RoleDescriptor
+ ok bool
+ marked = make(map[int64]issues_model.RoleDescriptor)
+ comment *issues_model.Comment
+ participants = make([]*user_model.User, 1, 10)
+ latestCloseCommentID int64
+ )
+ if ctx.Repo.Repository.IsTimetrackerEnabled(ctx) {
+ if ctx.IsSigned {
+ // Deal with the stopwatch
+ ctx.Data["IsStopwatchRunning"] = issues_model.StopwatchExists(ctx, ctx.Doer.ID, issue.ID)
+ if !ctx.Data["IsStopwatchRunning"].(bool) {
+ var exists bool
+ var swIssue *issues_model.Issue
+ if exists, _, swIssue, err = issues_model.HasUserStopwatch(ctx, ctx.Doer.ID); err != nil {
+ ctx.ServerError("HasUserStopwatch", err)
+ return
+ }
+ ctx.Data["HasUserStopwatch"] = exists
+ if exists {
+ // Add a warning if the user already has a running stopwatch,
+ // and link to the issue that stopwatch is tracking
+ ctx.Data["OtherStopwatchURL"] = swIssue.Link()
+ }
+ }
+ ctx.Data["CanUseTimetracker"] = ctx.Repo.CanUseTimetracker(ctx, issue, ctx.Doer)
+ } else {
+ ctx.Data["CanUseTimetracker"] = false
+ }
+ if ctx.Data["WorkingUsers"], err = issues_model.TotalTimesForEachUser(ctx, &issues_model.FindTrackedTimesOptions{IssueID: issue.ID}); err != nil {
+ ctx.ServerError("TotalTimesForEachUser", err)
+ return
+ }
+ }
+
+ // Check if the user can create issue dependencies
+ ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull)
+
+ // check if dependencies can be created across repositories
+ ctx.Data["AllowCrossRepositoryDependencies"] = setting.Service.AllowCrossRepositoryDependencies
+
+ if issue.ShowRole, err = roleDescriptor(ctx, repo, issue.Poster, issue, issue.HasOriginalAuthor()); err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[issue.PosterID] = issue.ShowRole
+
+ // Render comments and fetch participants.
+ participants[0] = issue.Poster
+
+ if err := issue.Comments.LoadAttachmentsByIssue(ctx); err != nil {
+ ctx.ServerError("LoadAttachmentsByIssue", err)
+ return
+ }
+ if err := issue.Comments.LoadPosters(ctx); err != nil {
+ ctx.ServerError("LoadPosters", err)
+ return
+ }
+
+ for _, comment = range issue.Comments {
+ comment.Issue = issue
+
+ if comment.Type == issues_model.CommentTypeComment || comment.Type == issues_model.CommentTypeReview {
+ comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ // Reuse the cached role tag if this poster was already processed.
+ role, ok = marked[comment.PosterID]
+ if ok {
+ comment.ShowRole = role
+ continue
+ }
+
+ comment.ShowRole, err = roleDescriptor(ctx, repo, comment.Poster, issue, comment.HasOriginalAuthor())
+ if err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[comment.PosterID] = comment.ShowRole
+ participants = addParticipant(comment.Poster, participants)
+ } else if comment.Type == issues_model.CommentTypeLabel {
+ if err = comment.LoadLabel(ctx); err != nil {
+ ctx.ServerError("LoadLabel", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypeMilestone {
+ if err = comment.LoadMilestone(ctx); err != nil {
+ ctx.ServerError("LoadMilestone", err)
+ return
+ }
+ ghostMilestone := &issues_model.Milestone{
+ ID: -1,
+ Name: ctx.Locale.TrString("repo.issues.deleted_milestone"),
+ }
+ if comment.OldMilestoneID > 0 && comment.OldMilestone == nil {
+ comment.OldMilestone = ghostMilestone
+ }
+ if comment.MilestoneID > 0 && comment.Milestone == nil {
+ comment.Milestone = ghostMilestone
+ }
+ } else if comment.Type == issues_model.CommentTypeProject {
+ if err = comment.LoadProject(ctx); err != nil {
+ ctx.ServerError("LoadProject", err)
+ return
+ }
+
+ ghostProject := &project_model.Project{
+ ID: project_model.GhostProjectID,
+ Title: ctx.Locale.TrString("repo.issues.deleted_project"),
+ }
+
+ if comment.OldProjectID > 0 && comment.OldProject == nil {
+ comment.OldProject = ghostProject
+ }
+
+ if comment.ProjectID > 0 && comment.Project == nil {
+ comment.Project = ghostProject
+ }
+ } else if comment.Type == issues_model.CommentTypeAssignees || comment.Type == issues_model.CommentTypeReviewRequest {
+ if err = comment.LoadAssigneeUserAndTeam(ctx); err != nil {
+ ctx.ServerError("LoadAssigneeUserAndTeam", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypeRemoveDependency || comment.Type == issues_model.CommentTypeAddDependency {
+ if err = comment.LoadDepIssueDetails(ctx); err != nil {
+ if !issues_model.IsErrIssueNotExist(err) {
+ ctx.ServerError("LoadDepIssueDetails", err)
+ return
+ }
+ }
+ } else if comment.Type.HasContentSupport() {
+ comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ if err = comment.LoadReview(ctx); err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("LoadReview", err)
+ return
+ }
+ participants = addParticipant(comment.Poster, participants)
+ if comment.Review == nil {
+ continue
+ }
+ if err = comment.Review.LoadAttributes(ctx); err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ ctx.ServerError("Review.LoadAttributes", err)
+ return
+ }
+ comment.Review.Reviewer = user_model.NewGhostUser()
+ }
+ if err = comment.Review.LoadCodeComments(ctx); err != nil {
+ ctx.ServerError("Review.LoadCodeComments", err)
+ return
+ }
+ for _, codeComments := range comment.Review.CodeComments {
+ for _, lineComments := range codeComments {
+ for _, c := range lineComments {
+ // Reuse the cached role tag if this poster was already processed.
+ role, ok = marked[c.PosterID]
+ if ok {
+ c.ShowRole = role
+ continue
+ }
+
+ c.ShowRole, err = roleDescriptor(ctx, repo, c.Poster, issue, c.HasOriginalAuthor())
+ if err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[c.PosterID] = c.ShowRole
+ participants = addParticipant(c.Poster, participants)
+ }
+ }
+ }
+ if err = comment.LoadResolveDoer(ctx); err != nil {
+ ctx.ServerError("LoadResolveDoer", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypePullRequestPush {
+ participants = addParticipant(comment.Poster, participants)
+ if err = comment.LoadPushCommits(ctx); err != nil {
+ ctx.ServerError("LoadPushCommits", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for _, commit := range comment.Commits {
+ if commit.Status == nil {
+ continue
+ }
+ commit.Status.HideActionsURL(ctx)
+ git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
+ }
+ }
+ } else if comment.Type == issues_model.CommentTypeAddTimeManual ||
+ comment.Type == issues_model.CommentTypeStopTracking ||
+ comment.Type == issues_model.CommentTypeDeleteTimeManual {
+ // drop the error since tracked times could have been pruned from the DB
+ _ = comment.LoadTime(ctx)
+ if comment.Content != "" {
+ // Before v1.21 the content stored the formatted string instead of seconds,
+ // so "|" is used as a delimiter to mark the new format
+ if comment.Content[0] != '|' {
+ // handle old time comments that have formatted text stored
+ comment.RenderedContent = templates.SanitizeHTML(comment.Content)
+ comment.Content = ""
+ } else {
+ // else it's just a duration in seconds to pass on to the frontend
+ comment.Content = comment.Content[1:]
+ }
+ }
+ }
+
+ if comment.Type == issues_model.CommentTypeClose || comment.Type == issues_model.CommentTypeMergePull {
+ // Record the ID of the latest close/merge comment.
+ // If the PR is closed, comments of type CommentTypePullRequestPush(29) after latestCloseCommentID won't be rendered.
+ latestCloseCommentID = comment.ID
+ }
+ }
+
+ ctx.Data["LatestCloseCommentID"] = latestCloseCommentID
+
+ // Combine multiple label assignments into a single comment
+ combineLabelComments(issue)
+
+ getBranchData(ctx, issue)
+ if issue.IsPull {
+ pull := issue.PullRequest
+ pull.Issue = issue
+ canDelete := false
+ allowMerge := false
+
+ if ctx.IsSigned {
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ log.Error("LoadHeadRepo: %v", err)
+ } else if pull.HeadRepo != nil {
+ perm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if perm.CanWrite(unit.TypeCode) {
+ // Check that the branch is not protected
+ if pull.HeadBranch != pull.HeadRepo.DefaultBranch {
+ if protected, err := git_model.IsBranchProtected(ctx, pull.HeadRepo.ID, pull.HeadBranch); err != nil {
+ log.Error("IsBranchProtected: %v", err)
+ } else if !protected {
+ canDelete = true
+ ctx.Data["DeleteBranchLink"] = issue.Link() + "/cleanup"
+ }
+ }
+ ctx.Data["CanWriteToHeadRepo"] = true
+ }
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ log.Error("LoadBaseRepo: %v", err)
+ }
+ perm, err := access_model.GetUserRepoPermission(ctx, pull.BaseRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ allowMerge, err = pull_service.IsUserAllowedToMerge(ctx, pull, perm, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToMerge", err)
+ return
+ }
+
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ }
+
+ ctx.Data["AllowMerge"] = allowMerge
+
+ prUnit, err := repo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ ctx.ServerError("GetUnit", err)
+ return
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ ctx.Data["AutodetectManualMerge"] = prConfig.AutodetectManualMerge
+
+ var mergeStyle repo_model.MergeStyle
+ // Validate the submitted merge style and select a default if necessary
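+ // If nothing valid was pre-selected, prefer the repo's configured default merge style; otherwise fall back to the first allowed style in a fixed order.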
+ if ms, ok := ctx.Data["MergeStyle"].(repo_model.MergeStyle); !ok ||
+ !prConfig.IsMergeStyleAllowed(ms) {
+ defaultMergeStyle := prConfig.GetDefaultMergeStyle()
+ if prConfig.IsMergeStyleAllowed(defaultMergeStyle) && !ok {
+ mergeStyle = defaultMergeStyle
+ } else if prConfig.AllowMerge {
+ mergeStyle = repo_model.MergeStyleMerge
+ } else if prConfig.AllowRebase {
+ mergeStyle = repo_model.MergeStyleRebase
+ } else if prConfig.AllowRebaseMerge {
+ mergeStyle = repo_model.MergeStyleRebaseMerge
+ } else if prConfig.AllowSquash {
+ mergeStyle = repo_model.MergeStyleSquash
+ } else if prConfig.AllowFastForwardOnly {
+ mergeStyle = repo_model.MergeStyleFastForwardOnly
+ } else if prConfig.AllowManualMerge {
+ mergeStyle = repo_model.MergeStyleManuallyMerged
+ }
+ }
+
+ ctx.Data["MergeStyle"] = mergeStyle
+
+ defaultMergeMessage, defaultMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, mergeStyle)
+ if err != nil {
+ ctx.ServerError("GetDefaultMergeMessage", err)
+ return
+ }
+ ctx.Data["DefaultMergeMessage"] = defaultMergeMessage
+ ctx.Data["DefaultMergeBody"] = defaultMergeBody
+
+ defaultSquashMergeMessage, defaultSquashMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, repo_model.MergeStyleSquash)
+ if err != nil {
+ ctx.ServerError("GetDefaultSquashMergeMessage", err)
+ return
+ }
+ ctx.Data["DefaultSquashMergeMessage"] = defaultSquashMergeMessage
+ ctx.Data["DefaultSquashMergeBody"] = defaultSquashMergeBody
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return
+ }
+ ctx.Data["ShowMergeInstructions"] = true
+ if pb != nil {
+ pb.Repo = pull.BaseRepo
+ var showMergeInstructions bool
+ if ctx.Doer != nil {
+ showMergeInstructions = pb.CanUserPush(ctx, ctx.Doer)
+ }
+ ctx.Data["ProtectedBranch"] = pb
+ ctx.Data["IsBlockedByApprovals"] = !issues_model.HasEnoughApprovals(ctx, pb, pull)
+ ctx.Data["IsBlockedByRejection"] = issues_model.MergeBlockedByRejectedReview(ctx, pb, pull)
+ ctx.Data["IsBlockedByOfficialReviewRequests"] = issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pull)
+ ctx.Data["IsBlockedByOutdatedBranch"] = issues_model.MergeBlockedByOutdatedBranch(pb, pull)
+ ctx.Data["GrantedApprovals"] = issues_model.GetGrantedApprovalsCount(ctx, pb, pull)
+ ctx.Data["RequireSigned"] = pb.RequireSignedCommits
+ ctx.Data["ChangedProtectedFiles"] = pull.ChangedProtectedFiles
+ ctx.Data["IsBlockedByChangedProtectedFiles"] = len(pull.ChangedProtectedFiles) != 0
+ ctx.Data["ChangedProtectedFilesNum"] = len(pull.ChangedProtectedFiles)
+ ctx.Data["ShowMergeInstructions"] = showMergeInstructions
+ }
+ ctx.Data["WillSign"] = false
+ if ctx.Doer != nil {
+ sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, pull.BaseRepo.RepoPath(), pull.BaseBranch, pull.GetGitRefName())
+ ctx.Data["WillSign"] = sign
+ ctx.Data["SigningKey"] = key
+ if err != nil {
+ if asymkey_service.IsErrWontSign(err) {
+ ctx.Data["WontSignReason"] = err.(*asymkey_service.ErrWontSign).Reason
+ } else {
+ ctx.Data["WontSignReason"] = "error"
+ log.Error("Error whilst checking if could sign pr %d in repo %s. Error: %v", pull.ID, pull.BaseRepo.FullName(), err)
+ }
+ }
+ } else {
+ ctx.Data["WontSignReason"] = "not_signed_in"
+ }
+
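+ // The head branch is only offered for deletion when the doer may delete it (see canDelete above), the branch still exists, and, for merged PRs, its tip still matches the PR's head commit.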
+ isPullBranchDeletable := canDelete &&
+ pull.HeadRepo != nil &&
+ git.IsBranchExist(ctx, pull.HeadRepo.RepoPath(), pull.HeadBranch) &&
+ (!pull.HasMerged || ctx.Data["HeadBranchCommitID"] == ctx.Data["PullHeadCommitID"])
+
+ if isPullBranchDeletable && pull.HasMerged {
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pull.HeadRepoID, pull.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+
+ isPullBranchDeletable = !exist
+ }
+ ctx.Data["IsPullBranchDeletable"] = isPullBranchDeletable
+
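+ // Manual merge is offered only to signed-in users for open PRs that cannot be merged automatically, are not WIP and are not being checked, when merging is allowed and manual merge is enabled.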
+ stillCanManualMerge := func() bool {
+ if pull.HasMerged || issue.IsClosed || !ctx.IsSigned {
+ return false
+ }
+ if pull.CanAutoMerge() || pull.IsWorkInProgress(ctx) || pull.IsChecking() {
+ return false
+ }
+ if allowMerge && prConfig.AllowManualMerge {
+ return true
+ }
+
+ return false
+ }
+
+ ctx.Data["StillCanManualMerge"] = stillCanManualMerge()
+
+ // Check if there is a pending pr merge
+ ctx.Data["HasPendingPullRequestMerge"], ctx.Data["PendingPullRequestMerge"], err = pull_model.GetScheduledMergeByPullID(ctx, pull.ID)
+ if err != nil {
+ ctx.ServerError("GetScheduledMergeByPullID", err)
+ return
+ }
+ }
+
+ // Get Dependencies
+ blockedBy, err := issue.BlockedByDependencies(ctx, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("BlockedByDependencies", err)
+ return
+ }
+ ctx.Data["BlockedByDependencies"], ctx.Data["BlockedByDependenciesNotPermitted"] = checkBlockedByIssues(ctx, blockedBy)
+ if ctx.Written() {
+ return
+ }
+
+ blocking, err := issue.BlockingDependencies(ctx)
+ if err != nil {
+ ctx.ServerError("BlockingDependencies", err)
+ return
+ }
+
+ ctx.Data["BlockingDependencies"], ctx.Data["BlockingDependenciesNotPermitted"] = checkBlockedByIssues(ctx, blocking)
+ if ctx.Written() {
+ return
+ }
+
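+ // Already-pinned issues can always be unpinned; for new pins, respect the per-repo pin limit.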
+ var pinAllowed bool
+ if !issue.IsPinned() {
+ pinAllowed, err = issues_model.IsNewPinAllowed(ctx, issue.RepoID, issue.IsPull)
+ if err != nil {
+ ctx.ServerError("IsNewPinAllowed", err)
+ return
+ }
+ } else {
+ pinAllowed = true
+ }
+
+ ctx.Data["Participants"] = participants
+ ctx.Data["NumParticipants"] = len(participants)
+ ctx.Data["Issue"] = issue
+ ctx.Data["Reference"] = issue.Ref
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(ctx.Data["Link"].(string))
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.Data["HasProjectsWritePermission"] = ctx.Repo.CanWrite(unit.TypeProjects)
+ ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.Doer.IsAdmin)
+ ctx.Data["LockReasons"] = setting.Repository.Issue.LockReasons
+ ctx.Data["RefEndName"] = git.RefName(issue.Ref).ShortName()
+ ctx.Data["NewPinAllowed"] = pinAllowed
+ ctx.Data["PinEnabled"] = setting.Repository.Issue.MaxPinned != 0
+
+ prepareHiddenCommentType(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ // For sidebar
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ ctx.HTML(http.StatusOK, tplIssueView)
+}
+
+// checkBlockedByIssues splits the given blockers into those the doer can read and those that are not permitted
+func checkBlockedByIssues(ctx *context.Context, blockers []*issues_model.DependencyInfo) (canRead, notPermitted []*issues_model.DependencyInfo) {
+ repoPerms := make(map[int64]access_model.Permission)
+ repoPerms[ctx.Repo.Repository.ID] = ctx.Repo.Permission
+ for _, blocker := range blockers {
+ // Get the permissions for this repository:
+ // if the repo ID exists in the map, reuse the cached permissions,
+ // else fetch the permission and add it to the map
+ var perm access_model.Permission
+ existPerm, ok := repoPerms[blocker.RepoID]
+ if ok {
+ perm = existPerm
+ } else {
+ var err error
+ perm, err = access_model.GetUserRepoPermission(ctx, &blocker.Repository, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil, nil
+ }
+ repoPerms[blocker.RepoID] = perm
+ }
+ if perm.CanReadIssuesOrPulls(blocker.Issue.IsPull) {
+ canRead = append(canRead, blocker)
+ } else {
+ notPermitted = append(notPermitted, blocker)
+ }
+ }
+ sortDependencyInfo(canRead)
+ sortDependencyInfo(notPermitted)
+ return canRead, notPermitted
+}
+
+func sortDependencyInfo(blockers []*issues_model.DependencyInfo) {
+ sort.Slice(blockers, func(i, j int) bool {
+ if blockers[i].RepoID == blockers[j].RepoID {
+ return blockers[i].Issue.CreatedUnix < blockers[j].Issue.CreatedUnix
+ }
+ return blockers[i].RepoID < blockers[j].RepoID
+ })
+}
+
+// GetActionIssue returns the issue that is used in the context.
+func GetActionIssue(ctx *context.Context) *issues_model.Issue {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetIssueByIndex", issues_model.IsErrIssueNotExist, err)
+ return nil
+ }
+ issue.Repo = ctx.Repo.Repository
+ checkIssueRights(ctx, issue)
+ if ctx.Written() {
+ return nil
+ }
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return nil
+ }
+ return issue
+}
+
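+// checkIssueRights responds with 404 when the corresponding unit (issues or pull requests) is not readable by the doer.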
+func checkIssueRights(ctx *context.Context, issue *issues_model.Issue) {
+ if issue.IsPull && !ctx.Repo.CanRead(unit.TypePullRequests) ||
+ !issue.IsPull && !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
+ }
+}
+
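+// getActionIssues parses the comma-separated "issue_ids" form value and returns the issues after verifying that they belong to the current repo and that their unit is readable.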
+func getActionIssues(ctx *context.Context) issues_model.IssueList {
+ commaSeparatedIssueIDs := ctx.FormString("issue_ids")
+ if len(commaSeparatedIssueIDs) == 0 {
+ return nil
+ }
+ issueIDs := make([]int64, 0, 10)
+ for _, stringIssueID := range strings.Split(commaSeparatedIssueIDs, ",") {
+ issueID, err := strconv.ParseInt(stringIssueID, 10, 64)
+ if err != nil {
+ ctx.ServerError("ParseInt", err)
+ return nil
+ }
+ issueIDs = append(issueIDs, issueID)
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ ctx.ServerError("GetIssuesByIDs", err)
+ return nil
+ }
+ // Check access rights for all issues
+ issueUnitEnabled := ctx.Repo.CanRead(unit.TypeIssues)
+ prUnitEnabled := ctx.Repo.CanRead(unit.TypePullRequests)
+ for _, issue := range issues {
+ if issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("some issue's RepoID is incorrect", errors.New("some issue's RepoID is incorrect"))
+ return nil
+ }
+ if issue.IsPull && !prUnitEnabled || !issue.IsPull && !issueUnitEnabled {
+ ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
+ return nil
+ }
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return nil
+ }
+ }
+ return issues
+}
+
+// GetIssueInfo gets an issue of a repository
+func GetIssueInfo(ctx *context.Context) {
+ issue, err := issues_model.GetIssueWithAttrsByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.Error(http.StatusNotFound)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
+ }
+ return
+ }
+
+ if issue.IsPull {
+ // Need to check if Pulls are enabled and we can read Pulls
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ } else {
+ // Need to check if Issues are enabled and we can read Issues
+ if !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ ctx.JSON(http.StatusOK, convert.ToIssue(ctx, ctx.Doer, issue))
+}
+
+// UpdateIssueTitle changes an issue's title
+func UpdateIssueTitle(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ title := ctx.FormTrim("title")
+ if util.IsEmptyString(title) {
+ ctx.Error(http.StatusBadRequest, "Title cannot be empty or contain only spaces")
+ return
+ }
+
+ // Creating a CreateIssueForm with the title so that we can validate the max title length
+ i := forms.CreateIssueForm{
+ Title: title,
+ }
+
+ bindingErr := binding.RawValidate(i)
+ if bindingErr.Has(binding.ERR_MAX_SIZE) {
+ ctx.Error(http.StatusBadRequest, "Title cannot be longer than 255 characters")
+ return
+ }
+
+ if err := issue_service.ChangeTitle(ctx, issue, ctx.Doer, title); err != nil {
+ ctx.ServerError("ChangeTitle", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "title": issue.Title,
+ })
+}
+
+// UpdateIssueRef changes an issue's ref (branch)
+func UpdateIssueRef(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) || issue.IsPull {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ ref := ctx.FormTrim("ref")
+
+ if err := issue_service.ChangeIssueRef(ctx, issue, ctx.Doer, ref); err != nil {
+ ctx.ServerError("ChangeRef", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ref": ref,
+ })
+}
+
+// UpdateIssueContent changes an issue's content
+func UpdateIssueContent(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if err := issue_service.ChangeContent(ctx, issue, ctx.Doer, ctx.Req.FormValue("content"), ctx.FormInt("content_version")); err != nil {
+ if errors.Is(err, issues_model.ErrIssueAlreadyChanged) {
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.pulls.edit.already_changed"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.edit.already_changed"))
+ }
+ } else {
+ ctx.ServerError("ChangeContent", err)
+ }
+ return
+ }
+
+ // when the update request doesn't intend to update attachments (e.g. changing a checkbox state), ignore attachment updates
+ if !ctx.FormBool("ignore_attachments") {
+ if err := updateAttachments(ctx, issue, ctx.FormStrings("files[]")); err != nil {
+ ctx.ServerError("UpdateAttachments", err)
+ return
+ }
+ }
+
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, issue.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "content": content,
+ "contentVersion": issue.ContentVersion,
+ "attachments": attachmentsHTML(ctx, issue.Attachments, issue.Content),
+ })
+}
+
+// UpdateIssueDeadline updates an issue deadline
+func UpdateIssueDeadline(ctx *context.Context) {
+ form := web.GetForm(ctx).(*api.EditDeadlineOption)
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
+ }
+ return
+ }
+
+ if !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "", "Not repo writer")
+ return
+ }
+
+ var deadlineUnix timeutil.TimeStamp
+ var deadline time.Time
+ if form.Deadline != nil && !form.Deadline.IsZero() {
+ deadline = time.Date(form.Deadline.Year(), form.Deadline.Month(), form.Deadline.Day(),
+ 23, 59, 59, 0, time.Local)
+ deadlineUnix = timeutil.TimeStamp(deadline.Unix())
+ }
+
+ if err := issues_model.UpdateIssueDeadline(ctx, issue, deadlineUnix, ctx.Doer); err != nil {
+ ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err.Error())
+ return
+ }
+
+ ctx.JSON(http.StatusCreated, api.IssueDeadline{Deadline: &deadline})
+}
+
+// UpdateIssueMilestone changes an issue's milestone
+func UpdateIssueMilestone(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ milestoneID := ctx.FormInt64("id")
+ for _, issue := range issues {
+ oldMilestoneID := issue.MilestoneID
+ if oldMilestoneID == milestoneID {
+ continue
+ }
+ issue.MilestoneID = milestoneID
+ if err := issue_service.ChangeMilestoneAssign(ctx, issue, ctx.Doer, oldMilestoneID); err != nil {
+ ctx.ServerError("ChangeMilestoneAssign", err)
+ return
+ }
+ }
+
+ if ctx.FormBool("htmx") {
+ renderMilestones(ctx)
+ if ctx.Written() {
+ return
+ }
+ prepareHiddenCommentType(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ issue := issues[0]
+ var err error
+ if issue.MilestoneID > 0 {
+ issue.Milestone, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, issue.MilestoneID)
+ if err != nil {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ return
+ }
+ } else {
+ issue.Milestone = nil
+ }
+
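+ // Fetch the most recent milestone comment so the sidebar fragment can render the change.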
+ comment := &issues_model.Comment{}
+ has, err := db.GetEngine(ctx).Where("issue_id = ? AND type = ?", issue.ID, issues_model.CommentTypeMilestone).OrderBy("id DESC").Limit(1).Get(comment)
+ if !has || err != nil {
+ ctx.ServerError("GetLatestMilestoneComment", err)
+ return
+ }
+ if err := comment.LoadMilestone(ctx); err != nil {
+ ctx.ServerError("LoadMilestone", err)
+ return
+ }
+ if err := comment.LoadPoster(ctx); err != nil {
+ ctx.ServerError("LoadPoster", err)
+ return
+ }
+ issue.Comments = issues_model.CommentList{comment}
+
+ ctx.Data["Issue"] = issue
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.HTML(http.StatusOK, "htmx/milestone_sidebar")
+ } else {
+ ctx.JSONOK()
+ }
+}
+
+// UpdateIssueAssignee changes the assignee of an issue or pull request
+func UpdateIssueAssignee(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ assigneeID := ctx.FormInt64("id")
+ action := ctx.FormString("action")
+
+ for _, issue := range issues {
+ switch action {
+ case "clear":
+ if err := issue_service.DeleteNotPassedAssignee(ctx, issue, ctx.Doer, []*user_model.User{}); err != nil {
+ ctx.ServerError("ClearAssignees", err)
+ return
+ }
+ default:
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ valid, err := access_model.CanBeAssigned(ctx, assignee, issue.Repo, issue.IsPull)
+ if err != nil {
+ ctx.ServerError("canBeAssigned", err)
+ return
+ }
+ if !valid {
+ ctx.ServerError("canBeAssigned", repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: assigneeID, RepoName: issue.Repo.Name})
+ return
+ }
+
+ _, _, err = issue_service.ToggleAssigneeWithNotify(ctx, issue, ctx.Doer, assigneeID)
+ if err != nil {
+ ctx.ServerError("ToggleAssignee", err)
+ return
+ }
+ }
+ }
+ ctx.JSONOK()
+}
+
+// UpdatePullReviewRequest adds or removes a review request
+func UpdatePullReviewRequest(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ reviewID := ctx.FormInt64("id")
+ action := ctx.FormString("action")
+
+ // TODO: 'clear' is not supported yet
+ if action != "attach" && action != "detach" {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ for _, issue := range issues {
+ if err := issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("issue.LoadRepo", err)
+ return
+ }
+
+ if !issue.IsPull {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add review request for non-PR issue %-v#%d",
+ issue.Repo, issue.Index,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ if reviewID < 0 {
+ // negative reviewIDs represent team requests
+ if err := issue.Repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("issue.Repo.LoadOwner", err)
+ return
+ }
+
+ if !issue.Repo.Owner.IsOrganization() {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add team review request for %s#%d owned by non organization UID[%d]",
+ issue.Repo.FullName(), issue.Index, issue.Repo.ID,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ team, err := organization.GetTeamByID(ctx, -reviewID)
+ if err != nil {
+ ctx.ServerError("GetTeamByID", err)
+ return
+ }
+
+ if team.OrgID != issue.Repo.OwnerID {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add team review request for UID[%d] team %s to %s#%d owned by UID[%d]",
+ team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID)
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ err = issue_service.IsValidTeamReviewRequest(ctx, team, ctx.Doer, action == "attach", issue)
+ if err != nil {
+ if issues_model.IsErrNotValidReviewRequest(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add invalid team review request for UID[%d] team %s to %s#%d owned by UID[%d]: Error: %v",
+ team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("IsValidTeamReviewRequest", err)
+ return
+ }
+
+ _, err = issue_service.TeamReviewRequest(ctx, issue, ctx.Doer, team, action == "attach")
+ if err != nil {
+ ctx.ServerError("TeamReviewRequest", err)
+ return
+ }
+ continue
+ }
+
+ reviewer, err := user_model.GetUserByID(ctx, reviewID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: requested reviewer [%d] for %-v#%d does not exist: Error: %v",
+ reviewID, issue.Repo, issue.Index,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ err = issue_service.IsValidReviewRequest(ctx, reviewer, ctx.Doer, action == "attach", issue, nil)
+ if err != nil {
+ if issues_model.IsErrNotValidReviewRequest(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add invalid review request for %-v to %-v#%d: Error: %v",
+ reviewer, issue.Repo, issue.Index,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("isValidReviewRequest", err)
+ return
+ }
+
+ _, err = issue_service.ReviewRequest(ctx, issue, ctx.Doer, reviewer, action == "attach")
+ if err != nil {
+ if issues_model.IsErrReviewRequestOnClosedPR(err) {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("ReviewRequest", err)
+ return
+ }
+ }
+
+ ctx.JSONOK()
+}
+
+// SearchIssues searches for issues across the repositories that the user has access to
+func SearchIssues(ctx *context.Context) {
+ before, since, err := context.GetQueryBeforeSince(ctx.Base)
+ if err != nil {
+ log.Error("GetQueryBeforeSince: %v", err)
+ ctx.Error(http.StatusUnprocessableEntity, "invalid before or since")
+ return
+ }
+
+ var isClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isClosed = optional.Some(true)
+ case "all":
+ isClosed = optional.None[bool]()
+ default:
+ isClosed = optional.Some(false)
+ }
+
+ var (
+ repoIDs []int64
+ allPublic bool
+ )
+ {
+ // find repos the user can access (for issue search)
+ opts := &repo_model.SearchRepoOptions{
+ Private: false,
+ AllPublic: true,
+ TopicOnly: false,
+ Collaborate: optional.None[bool](),
+ // This needs to be a column that is not nil in fixtures or
+ // MySQL will return different results when sorting by null in some cases
+ OrderBy: db.SearchOrderByAlphabetically,
+ Actor: ctx.Doer,
+ }
+ if ctx.IsSigned {
+ opts.Private = true
+ opts.AllLimited = true
+ }
+ if ctx.FormString("owner") != "" {
+ owner, err := user_model.GetUserByName(ctx, ctx.FormString("owner"))
+ if err != nil {
+ log.Error("GetUserByName: %v", err)
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Error(http.StatusBadRequest, "Owner not found", err.Error())
+ } else {
+ ctx.Error(http.StatusInternalServerError)
+ }
+ return
+ }
+ opts.OwnerID = owner.ID
+ opts.AllLimited = false
+ opts.AllPublic = false
+ opts.Collaborate = optional.Some(false)
+ }
+ if ctx.FormString("team") != "" {
+ if ctx.FormString("owner") == "" {
+ ctx.Error(http.StatusBadRequest, "Owner organisation is required for filtering on team")
+ return
+ }
+ team, err := organization.GetTeam(ctx, opts.OwnerID, ctx.FormString("team"))
+ if err != nil {
+ log.Error("GetTeam: %v", err)
+ if organization.IsErrTeamNotExist(err) {
+ ctx.Error(http.StatusBadRequest)
+ } else {
+ ctx.Error(http.StatusInternalServerError)
+ }
+ return
+ }
+ opts.TeamID = team.ID
+ }
+
+ if opts.AllPublic {
+ allPublic = true
+ opts.AllPublic = false // set it to false to avoid returning too many repos; we can filter by the indexer instead
+ }
+ repoIDs, _, err = repo_model.SearchRepositoryIDs(ctx, opts)
+ if err != nil {
+ log.Error("SearchRepositoryIDs: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ if len(repoIDs) == 0 {
+ // no repos found, don't let the indexer return all repos
+ repoIDs = []int64{0}
+ }
+ }
+
+ keyword := ctx.FormTrim("q")
+ if strings.IndexByte(keyword, 0) >= 0 {
+ keyword = ""
+ }
+
+ isPull := optional.None[bool]()
+ switch ctx.FormString("type") {
+ case "pulls":
+ isPull = optional.Some(true)
+ case "issues":
+ isPull = optional.Some(false)
+ }
+
+ var includedAnyLabels []int64
+ {
+ labels := ctx.FormTrim("labels")
+ var includedLabelNames []string
+ if len(labels) > 0 {
+ includedLabelNames = strings.Split(labels, ",")
+ }
+ includedAnyLabels, err = issues_model.GetLabelIDsByNames(ctx, includedLabelNames)
+ if err != nil {
+ log.Error("GetLabelIDsByNames: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ var includedMilestones []int64
+ {
+ milestones := ctx.FormTrim("milestones")
+ var includedMilestoneNames []string
+ if len(milestones) > 0 {
+ includedMilestoneNames = strings.Split(milestones, ",")
+ }
+ includedMilestones, err = issues_model.GetMilestoneIDsByNames(ctx, includedMilestoneNames)
+ if err != nil {
+ log.Error("GetMilestoneIDsByNames: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ projectID := optional.None[int64]()
+ if v := ctx.FormInt64("project"); v > 0 {
+ projectID = optional.Some(v)
+ }
+
+ // this API is also used in the UI,
+ // so the default limit is set to fit UI needs
+ limit := ctx.FormInt("limit")
+ if limit == 0 {
+ limit = setting.UI.IssuePagingNum
+ } else if limit > setting.API.MaxResponseItems {
+ limit = setting.API.MaxResponseItems
+ }
+
+ searchOpt := &issue_indexer.SearchOptions{
+ Paginator: &db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: limit,
+ },
+ Keyword: keyword,
+ RepoIDs: repoIDs,
+ AllPublic: allPublic,
+ IsPull: isPull,
+ IsClosed: isClosed,
+ IncludedAnyLabelIDs: includedAnyLabels,
+ MilestoneIDs: includedMilestones,
+ ProjectID: projectID,
+ SortBy: issue_indexer.SortByCreatedDesc,
+ }
+
+ if since != 0 {
+ searchOpt.UpdatedAfterUnix = optional.Some(since)
+ }
+ if before != 0 {
+ searchOpt.UpdatedBeforeUnix = optional.Some(before)
+ }
+
+ if ctx.IsSigned {
+ ctxUserID := ctx.Doer.ID
+ if ctx.FormBool("created") {
+ searchOpt.PosterID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("assigned") {
+ searchOpt.AssigneeID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("mentioned") {
+ searchOpt.MentionID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("review_requested") {
+ searchOpt.ReviewRequestedID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("reviewed") {
+ searchOpt.ReviewedID = optional.Some(ctxUserID)
+ }
+ }
+
+ // FIXME: Sorting by priority repo is not supported when searching via the indexer;
+ // it is indeed a regression, but it is worth supporting filtering by the indexer first.
+ _ = ctx.FormInt64("priority_repo_id")
+
+ ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
+ if err != nil {
+ log.Error("SearchIssues: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ log.Error("GetIssuesByIDs: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ ctx.SetTotalCountHeader(total)
+ ctx.JSON(http.StatusOK, convert.ToIssueList(ctx, ctx.Doer, issues))
+}
+
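+// getUserIDForFilter resolves the user name from the given query parameter to a user ID, writing an error response if the user cannot be found.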
+func getUserIDForFilter(ctx *context.Context, queryName string) int64 {
+ userName := ctx.FormString(queryName)
+ if len(userName) == 0 {
+ return 0
+ }
+
+ user, err := user_model.GetUserByName(ctx, userName)
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("", err)
+ return 0
+ }
+
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return 0
+ }
+
+ return user.ID
+}
+
+// ListIssues lists the issues of a repository
+func ListIssues(ctx *context.Context) {
+ before, since, err := context.GetQueryBeforeSince(ctx.Base)
+ if err != nil {
+ ctx.Error(http.StatusUnprocessableEntity, err.Error())
+ return
+ }
+
+ var isClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isClosed = optional.Some(true)
+ case "all":
+ isClosed = optional.None[bool]()
+ default:
+ isClosed = optional.Some(false)
+ }
+
+ keyword := ctx.FormTrim("q")
+ if strings.IndexByte(keyword, 0) >= 0 {
+ keyword = ""
+ }
+
+ var labelIDs []int64
+ if split := strings.Split(ctx.FormString("labels"), ","); len(split) > 0 {
+ labelIDs, err = issues_model.GetLabelIDsInRepoByNames(ctx, ctx.Repo.Repository.ID, split)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ var mileIDs []int64
+ if part := strings.Split(ctx.FormString("milestones"), ","); len(part) > 0 {
+ for i := range part {
+ // use names and fall back to IDs;
+ // non-existent milestones are discarded
+ mile, err := issues_model.GetMilestoneByRepoIDANDName(ctx, ctx.Repo.Repository.ID, part[i])
+ if err == nil {
+ mileIDs = append(mileIDs, mile.ID)
+ continue
+ }
+ if !issues_model.IsErrMilestoneNotExist(err) {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ id, err := strconv.ParseInt(part[i], 10, 64)
+ if err != nil {
+ continue
+ }
+ mile, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, id)
+ if err == nil {
+ mileIDs = append(mileIDs, mile.ID)
+ continue
+ }
+ if issues_model.IsErrMilestoneNotExist(err) {
+ continue
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ }
+
+ projectID := optional.None[int64]()
+ if v := ctx.FormInt64("project"); v > 0 {
+ projectID = optional.Some(v)
+ }
+
+ isPull := optional.None[bool]()
+ switch ctx.FormString("type") {
+ case "pulls":
+ isPull = optional.Some(true)
+ case "issues":
+ isPull = optional.Some(false)
+ }
+
+ // FIXME: we should be more efficient here
+ createdByID := getUserIDForFilter(ctx, "created_by")
+ if ctx.Written() {
+ return
+ }
+ assignedByID := getUserIDForFilter(ctx, "assigned_by")
+ if ctx.Written() {
+ return
+ }
+ mentionedByID := getUserIDForFilter(ctx, "mentioned_by")
+ if ctx.Written() {
+ return
+ }
+
+ searchOpt := &issue_indexer.SearchOptions{
+ Paginator: &db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ Keyword: keyword,
+ RepoIDs: []int64{ctx.Repo.Repository.ID},
+ IsPull: isPull,
+ IsClosed: isClosed,
+ ProjectID: projectID,
+ SortBy: issue_indexer.SortByCreatedDesc,
+ }
+ if since != 0 {
+ searchOpt.UpdatedAfterUnix = optional.Some(since)
+ }
+ if before != 0 {
+ searchOpt.UpdatedBeforeUnix = optional.Some(before)
+ }
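+ // A single label ID of 0 selects issues without any label; negative IDs exclude the corresponding label.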
+ if len(labelIDs) == 1 && labelIDs[0] == 0 {
+ searchOpt.NoLabelOnly = true
+ } else {
+ for _, labelID := range labelIDs {
+ if labelID > 0 {
+ searchOpt.IncludedLabelIDs = append(searchOpt.IncludedLabelIDs, labelID)
+ } else {
+ searchOpt.ExcludedLabelIDs = append(searchOpt.ExcludedLabelIDs, -labelID)
+ }
+ }
+ }
+
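+ // db.NoConditionID as the only milestone ID selects issues without a milestone.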
+ if len(mileIDs) == 1 && mileIDs[0] == db.NoConditionID {
+ searchOpt.MilestoneIDs = []int64{0}
+ } else {
+ searchOpt.MilestoneIDs = mileIDs
+ }
+
+ if createdByID > 0 {
+ searchOpt.PosterID = optional.Some(createdByID)
+ }
+ if assignedByID > 0 {
+ searchOpt.AssigneeID = optional.Some(assignedByID)
+ }
+ if mentionedByID > 0 {
+ searchOpt.MentionID = optional.Some(mentionedByID)
+ }
+
+ ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "SearchIssues", err.Error())
+ return
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "FindIssuesByIDs", err.Error())
+ return
+ }
+
+ ctx.SetTotalCountHeader(total)
+ ctx.JSON(http.StatusOK, convert.ToIssueList(ctx, ctx.Doer, issues))
+}
+
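+// BatchDeleteIssues deletes all issues selected via the "issue_ids" form value.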
+func BatchDeleteIssues(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ for _, issue := range issues {
+ if err := issue_service.DeleteIssue(ctx, ctx.Doer, ctx.Repo.GitRepo, issue); err != nil {
+ ctx.ServerError("DeleteIssue", err)
+ return
+ }
+ }
+ ctx.JSONOK()
+}
+
+// UpdateIssueStatus changes an issue's status
+func UpdateIssueStatus(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ var isClosed bool
+ switch action := ctx.FormString("action"); action {
+ case "open":
+ isClosed = false
+ case "close":
+ isClosed = true
+ default:
+ log.Warn("Unrecognized action: %s", action)
+ }
+
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("LoadRepositories", err)
+ return
+ }
+ if err := issues.LoadPullRequests(ctx); err != nil {
+ ctx.ServerError("LoadPullRequests", err)
+ return
+ }
+
+ for _, issue := range issues {
+ if issue.IsPull && issue.PullRequest.HasMerged {
+ continue
+ }
+ if issue.IsClosed != isClosed {
+ if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", isClosed); err != nil {
+ if issues_model.IsErrDependenciesLeft(err) {
+ ctx.JSON(http.StatusPreconditionFailed, map[string]any{
+ "error": ctx.Tr("repo.issues.dependency.issue_batch_close_blocked", issue.Index),
+ })
+ return
+ }
+ ctx.ServerError("ChangeStatus", err)
+ return
+ }
+ }
+ }
+ ctx.JSONOK()
+}
+
+// NewComment creates a comment on an issue
+func NewComment(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateCommentForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin {
+ ctx.JSONError(ctx.Tr("repo.issues.comment_on_locked"))
+ return
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ var comment *issues_model.Comment
+ defer func() {
+ // Check if an issue admin/poster changed the status of the issue.
+ if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
+ (form.Status == "reopen" || form.Status == "close") &&
+ !(issue.IsPull && issue.PullRequest.HasMerged) {
+ // Duplication and conflict checks should apply when reopening a pull request.
+ var pr *issues_model.PullRequest
+
+ if form.Status == "reopen" && issue.IsPull {
+ pull := issue.PullRequest
+ var err error
+ pr, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow)
+ if err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ return
+ }
+ }
+
+ // Regenerate the patch and test for conflicts.
+ if pr == nil {
+ issue.PullRequest.HeadCommitID = ""
+ pull_service.AddToTaskQueue(ctx, issue.PullRequest)
+ }
+
+ // check whether the PR ref <refs/pulls/pr_index/head> in the base repo is consistent with the head commit of the head branch in the head repo
+ // get the head commit of the PR
+ if pull.Flow == issues_model.PullRequestFlowGithub {
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("Unable to load base repo", err)
+ return
+ }
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("Unable to load head repo", err)
+ return
+ }
+
+ // Check if the base branch of the pull request still exists.
+ if ok := git.IsBranchExist(ctx, pull.BaseRepo.RepoPath(), pull.BaseBranch); !ok {
+ ctx.JSONError(ctx.Tr("repo.pulls.reopen_failed.base_branch"))
+ return
+ }
+
+ // Check if the head branch of the pull request still exists.
+ if ok := git.IsBranchExist(ctx, pull.HeadRepo.RepoPath(), pull.HeadBranch); !ok {
+ ctx.JSONError(ctx.Tr("repo.pulls.reopen_failed.head_branch"))
+ return
+ }
+
+ prHeadRef := pull.GetGitRefName()
+ prHeadCommitID, err := git.GetFullCommitID(ctx, pull.BaseRepo.RepoPath(), prHeadRef)
+ if err != nil {
+ ctx.ServerError("Failed to get the head commit ID of the PR", err)
+ return
+ }
+
+ headBranchRef := pull.GetGitHeadBranchRefName()
+ headBranchCommitID, err := git.GetFullCommitID(ctx, pull.HeadRepo.RepoPath(), headBranchRef)
+ if err != nil {
+ ctx.ServerError("Failed to get the head commit ID of the head branch", err)
+ return
+ }
+
+ err = pull.LoadIssue(ctx)
+ if err != nil {
+ ctx.ServerError("Failed to load the issue of the pull request", err)
+ return
+ }
+
+ if prHeadCommitID != headBranchCommitID {
+ // force push to base repo
+ err := git.Push(ctx, pull.HeadRepo.RepoPath(), git.PushOptions{
+ Remote: pull.BaseRepo.RepoPath(),
+ Branch: pull.HeadBranch + ":" + prHeadRef,
+ Force: true,
+ Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo),
+ })
+ if err != nil {
+ ctx.ServerError("force push error", err)
+ return
+ }
+ }
+ }
+ }
+
+ if pr != nil {
+ ctx.Flash.Info(ctx.Tr("repo.pulls.open_unmerged_pull_exists", pr.Index))
+ } else {
+ isClosed := form.Status == "close"
+ if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", isClosed); err != nil {
+ log.Error("ChangeStatus: %v", err)
+
+ if issues_model.IsErrDependenciesLeft(err) {
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.issue_close_blocked"))
+ }
+ return
+ }
+ } else {
+ if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
+ ctx.ServerError("CreateOrStopIssueStopwatch", err)
+ return
+ }
+
+ log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed)
+ }
+ }
+ }
+
+ // Redirect to comment hashtag if there is any actual content.
+ typeName := "issues"
+ if issue.IsPull {
+ typeName = "pulls"
+ }
+ if comment != nil {
+ ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d#%s", ctx.Repo.RepoLink, typeName, issue.Index, comment.HashTag()))
+ } else {
+ ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, typeName, issue.Index))
+ }
+ }()
+
+ // Fix #321: Allow empty comments, as long as we have attachments.
+ if len(form.Content) == 0 && len(attachments) == 0 {
+ return
+ }
+
+ comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments)
+ if err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_by_user"))
+ } else {
+ ctx.ServerError("CreateIssueComment", err)
+ }
+ return
+ }
+
+ log.Trace("Comment created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, comment.ID)
+}
+
+// UpdateCommentContent changes the content of an issue comment
+func UpdateCommentContent(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ oldContent := comment.Content
+ newContent := ctx.FormString("content")
+ contentVersion := ctx.FormInt("content_version")
+
+ comment.Content = newContent
+ if err = issue_service.UpdateComment(ctx, comment, contentVersion, ctx.Doer, oldContent); err != nil {
+ if errors.Is(err, issues_model.ErrCommentAlreadyChanged) {
+ ctx.JSONError(ctx.Tr("repo.comments.edit.already_changed"))
+ } else {
+ ctx.ServerError("UpdateComment", err)
+ }
+ return
+ }
+
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+
+ // when the update request doesn't intend to update attachments (e.g. changing a checkbox state), ignore attachment updates
+ if !ctx.FormBool("ignore_attachments") {
+ if err := updateAttachments(ctx, comment, ctx.FormStrings("files[]")); err != nil {
+ ctx.ServerError("UpdateAttachments", err)
+ return
+ }
+ }
+
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "content": content,
+ "contentVersion": comment.ContentVersion,
+ "attachments": attachmentsHTML(ctx, comment.Attachments, comment.Content),
+ })
+}
+
+// DeleteComment deletes a comment on an issue
+func DeleteComment(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ } else if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ if err = issue_service.DeleteComment(ctx, ctx.Doer, comment); err != nil {
+ ctx.ServerError("DeleteComment", err)
+ return
+ }
+
+ ctx.Status(http.StatusOK)
+}
+
+// ChangeIssueReaction creates a reaction for an issue
+func ChangeIssueReaction(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ReactionForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.ServerError("ChangeIssueReaction", errors.New(ctx.GetErrMsg()))
+ return
+ }
+
+ switch ctx.Params(":action") {
+ case "react":
+ reaction, err := issue_service.CreateIssueReaction(ctx, ctx.Doer, issue, form.Content)
+ if err != nil {
+ if issues_model.IsErrForbiddenIssueReaction(err) {
+ ctx.ServerError("ChangeIssueReaction", err)
+ return
+ }
+ log.Info("CreateIssueReaction: %s", err)
+ break
+ }
+
+ log.Trace("Reaction for issue created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, reaction.ID)
+ case "unreact":
+ if err := issues_model.DeleteIssueReaction(ctx, ctx.Doer.ID, issue.ID, form.Content); err != nil {
+ ctx.ServerError("DeleteIssueReaction", err)
+ return
+ }
+
+ log.Trace("Reaction for issue removed: %d/%d", ctx.Repo.Repository.ID, issue.ID)
+ default:
+ ctx.NotFound(fmt.Sprintf("Unknown action %s", ctx.Params(":action")), nil)
+ return
+ }
+
+ // Reload new reactions
+ issue.Reactions = nil
+ if err := issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("ChangeIssueReaction.LoadAttributes", err)
+ return
+ }
+
+ if len(issue.Reactions) == 0 {
+ ctx.JSON(http.StatusOK, map[string]any{
+ "empty": true,
+ "html": "",
+ })
+ return
+ }
+
+ html, err := ctx.RenderToHTML(tplReactions, map[string]any{
+ "ctxData": ctx.Data,
+ "ActionURL": fmt.Sprintf("%s/issues/%d/reactions", ctx.Repo.RepoLink, issue.Index),
+ "Reactions": issue.Reactions.GroupByType(),
+ })
+ if err != nil {
+ ctx.ServerError("ChangeIssueReaction.HTMLString", err)
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]any{
+ "html": html,
+ })
+}
+
+// ChangeCommentReaction creates a reaction for a comment
+func ChangeCommentReaction(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ReactionForm)
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if comment.Issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ comment.Issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ switch ctx.Params(":action") {
+ case "react":
+ reaction, err := issue_service.CreateCommentReaction(ctx, ctx.Doer, comment.Issue, comment, form.Content)
+ if err != nil {
+ if issues_model.IsErrForbiddenIssueReaction(err) {
+ ctx.ServerError("ChangeIssueReaction", err)
+ return
+ }
+ log.Info("CreateCommentReaction: %s", err)
+ break
+ }
+
+ log.Trace("Reaction for comment created: %d/%d/%d/%d", ctx.Repo.Repository.ID, comment.Issue.ID, comment.ID, reaction.ID)
+ case "unreact":
+ if err := issues_model.DeleteCommentReaction(ctx, ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Content); err != nil {
+ ctx.ServerError("DeleteCommentReaction", err)
+ return
+ }
+
+ log.Trace("Reaction for comment removed: %d/%d/%d", ctx.Repo.Repository.ID, comment.Issue.ID, comment.ID)
+ default:
+ ctx.NotFound(fmt.Sprintf("Unknown action %s", ctx.Params(":action")), nil)
+ return
+ }
+
+ // Reload new reactions
+ comment.Reactions = nil
+ if err = comment.LoadReactions(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("ChangeCommentReaction.LoadReactions", err)
+ return
+ }
+
+ if len(comment.Reactions) == 0 {
+ ctx.JSON(http.StatusOK, map[string]any{
+ "empty": true,
+ "html": "",
+ })
+ return
+ }
+
+ html, err := ctx.RenderToHTML(tplReactions, map[string]any{
+ "ctxData": ctx.Data,
+ "ActionURL": fmt.Sprintf("%s/comments/%d/reactions", ctx.Repo.RepoLink, comment.ID),
+ "Reactions": comment.Reactions.GroupByType(),
+ })
+ if err != nil {
+ ctx.ServerError("ChangeCommentReaction.HTMLString", err)
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]any{
+ "html": html,
+ })
+}
+
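+// addParticipant appends the poster to the participants slice unless they are already in it.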
+func addParticipant(poster *user_model.User, participants []*user_model.User) []*user_model.User {
+ for _, part := range participants {
+ if poster.ID == part.ID {
+ return participants
+ }
+ }
+ return append(participants, poster)
+}
+
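+// filterXRefComments removes cross-reference comments that point to repositories
+// the viewer is not allowed to read.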
+func filterXRefComments(ctx *context.Context, issue *issues_model.Issue) error {
+	// Remove comments that the user has no permission to see
+ for i := 0; i < len(issue.Comments); {
+ c := issue.Comments[i]
+ if issues_model.CommentTypeIsRef(c.Type) && c.RefRepoID != issue.RepoID && c.RefRepoID != 0 {
+ var err error
+ // Set RefRepo for description in template
+ c.RefRepo, err = repo_model.GetRepositoryByID(ctx, c.RefRepoID)
+ if err != nil {
+ return err
+ }
+ perm, err := access_model.GetUserRepoPermission(ctx, c.RefRepo, ctx.Doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanReadIssuesOrPulls(c.RefIsPull) {
+ issue.Comments = append(issue.Comments[:i], issue.Comments[i+1:]...)
+ continue
+ }
+ }
+ i++
+ }
+ return nil
+}
+
+// GetIssueAttachments returns attachments for the issue
+func GetIssueAttachments(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ attachments := make([]*api.Attachment, len(issue.Attachments))
+ for i := 0; i < len(issue.Attachments); i++ {
+ attachments[i] = convert.ToAttachment(ctx.Repo.Repository, issue.Attachments[i])
+ }
+ ctx.JSON(http.StatusOK, attachments)
+}
+
+// GetCommentAttachments returns attachments for the comment
+func GetCommentAttachments(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.Repo.Permission.CanReadIssuesOrPulls(comment.Issue.IsPull) {
+ ctx.NotFound("CanReadIssuesOrPulls", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !comment.Type.HasAttachmentSupport() {
+ ctx.ServerError("GetCommentAttachments", fmt.Errorf("comment type %v does not support attachments", comment.Type))
+ return
+ }
+
+ attachments := make([]*api.Attachment, 0)
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+ for i := 0; i < len(comment.Attachments); i++ {
+ attachments = append(attachments, convert.ToAttachment(ctx.Repo.Repository, comment.Attachments[i]))
+ }
+ ctx.JSON(http.StatusOK, attachments)
+}
+
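+// updateAttachments syncs the attachments of an issue or comment with the given
+// list of attachment UUIDs: attachments missing from the list are deleted, the
+// listed ones are bound to the item, and the item's attachment slice is reloaded.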
+func updateAttachments(ctx *context.Context, item any, files []string) error {
+ var attachments []*repo_model.Attachment
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ attachments = content.Attachments
+ case *issues_model.Comment:
+ attachments = content.Attachments
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ for i := 0; i < len(attachments); i++ {
+ if util.SliceContainsString(files, attachments[i].UUID) {
+ continue
+ }
+ if err := repo_model.DeleteAttachment(ctx, attachments[i], true); err != nil {
+ return err
+ }
+ }
+ var err error
+ if len(files) > 0 {
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ err = issues_model.UpdateIssueAttachments(ctx, content.ID, files)
+ case *issues_model.Comment:
+ err = content.UpdateAttachments(ctx, files)
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ if err != nil {
+ return err
+ }
+ }
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ content.Attachments, err = repo_model.GetAttachmentsByIssueID(ctx, content.ID)
+ case *issues_model.Comment:
+ content.Attachments, err = repo_model.GetAttachmentsByCommentID(ctx, content.ID)
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ return err
+}
+
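+// attachmentsHTML renders the given attachments (plus the surrounding content)
+// to HTML using the shared attachment template.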
+func attachmentsHTML(ctx *context.Context, attachments []*repo_model.Attachment, content string) template.HTML {
+ attachHTML, err := ctx.RenderToHTML(tplAttachment, map[string]any{
+ "ctxData": ctx.Data,
+ "Attachments": attachments,
+ "Content": content,
+ })
+ if err != nil {
+ ctx.ServerError("attachmentsHTML.HTMLString", err)
+ return ""
+ }
+ return attachHTML
+}
+
+// combineLabelComments combines nearby label comments into one.
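+// Label comments are merged when they are adjacent, posted by the same user and
+// created within 60 seconds of each other; a comment content of "1" marks an
+// added label, any other content marks a removed one.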
+func combineLabelComments(issue *issues_model.Issue) {
+ var prev, cur *issues_model.Comment
+ for i := 0; i < len(issue.Comments); i++ {
+ cur = issue.Comments[i]
+ if i > 0 {
+ prev = issue.Comments[i-1]
+ }
+ if i == 0 || cur.Type != issues_model.CommentTypeLabel ||
+ (prev != nil && prev.PosterID != cur.PosterID) ||
+ (prev != nil && cur.CreatedUnix-prev.CreatedUnix >= 60) {
+ if cur.Type == issues_model.CommentTypeLabel && cur.Label != nil {
+ if cur.Content != "1" {
+ cur.RemovedLabels = append(cur.RemovedLabels, cur.Label)
+ } else {
+ cur.AddedLabels = append(cur.AddedLabels, cur.Label)
+ }
+ }
+ continue
+ }
+
+		if cur.Label != nil { // now cur MUST be a label comment
+			if prev.Type == issues_model.CommentTypeLabel { // we can combine them only if prev is a label comment
+ if cur.Content != "1" {
+					// if the removed label is already in the AddedLabels list, drop it from
+					// that list; otherwise record it in RemovedLabels
+ addedAndRemoved := false
+ for i, label := range prev.AddedLabels {
+ if cur.Label.ID == label.ID {
+ prev.AddedLabels = append(prev.AddedLabels[:i], prev.AddedLabels[i+1:]...)
+ addedAndRemoved = true
+ break
+ }
+ }
+ if !addedAndRemoved {
+ prev.RemovedLabels = append(prev.RemovedLabels, cur.Label)
+ }
+ } else {
+					// if the added label is already in the RemovedLabels list, drop it from
+					// that list; otherwise record it in AddedLabels
+ removedAndAdded := false
+ for i, label := range prev.RemovedLabels {
+ if cur.Label.ID == label.ID {
+ prev.RemovedLabels = append(prev.RemovedLabels[:i], prev.RemovedLabels[i+1:]...)
+ removedAndAdded = true
+ break
+ }
+ }
+ if !removedAndAdded {
+ prev.AddedLabels = append(prev.AddedLabels, cur.Label)
+ }
+ }
+ prev.CreatedUnix = cur.CreatedUnix
+				// remove the current comment since it has been combined into the prev comment
+ issue.Comments = append(issue.Comments[:i], issue.Comments[i+1:]...)
+ i--
+ } else { // if prev is not a label comment, start a new group
+ if cur.Content != "1" {
+ cur.RemovedLabels = append(cur.RemovedLabels, cur.Label)
+ } else {
+ cur.AddedLabels = append(cur.AddedLabels, cur.Label)
+ }
+ }
+ }
+ }
+}
+
+// handleTeamMentions gets all teams that the current user can mention
+func handleTeamMentions(ctx *context.Context) {
+ if ctx.Doer == nil || !ctx.Repo.Owner.IsOrganization() {
+ return
+ }
+
+ var isAdmin bool
+ var err error
+ var teams []*organization.Team
+ org := organization.OrgFromUser(ctx.Repo.Owner)
+ // Admin has super access.
+ if ctx.Doer.IsAdmin {
+ isAdmin = true
+ } else {
+ isAdmin, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("IsOwnedBy", err)
+ return
+ }
+ }
+
+ if isAdmin {
+ teams, err = org.LoadTeams(ctx)
+ if err != nil {
+ ctx.ServerError("LoadTeams", err)
+ return
+ }
+ } else {
+ teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
+ }
+ }
+
+ ctx.Data["MentionableTeams"] = teams
+ ctx.Data["MentionableTeamsOrg"] = ctx.Repo.Owner.Name
+ ctx.Data["MentionableTeamsOrgAvatar"] = ctx.Repo.Owner.AvatarLink(ctx)
+}
+
+type userSearchInfo struct {
+ UserID int64 `json:"user_id"`
+ UserName string `json:"username"`
+ AvatarLink string `json:"avatar_link"`
+ FullName string `json:"full_name"`
+}
+
+type userSearchResponse struct {
+ Results []*userSearchInfo `json:"results"`
+}
+
+// IssuePosters gets posters for the current repo's issues/pull requests
+func IssuePosters(ctx *context.Context) {
+ issuePosters(ctx, false)
+}
+
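+// PullPosters gets posters for the current repo's pull requests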
+func PullPosters(ctx *context.Context) {
+ issuePosters(ctx, true)
+}
+
+func issuePosters(ctx *context.Context, isPullList bool) {
+ repo := ctx.Repo.Repository
+ search := strings.TrimSpace(ctx.FormString("q"))
+ posters, err := repo_model.GetIssuePostersWithSearch(ctx, repo, isPullList, search, setting.UI.DefaultShowFullName)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+
+ if search == "" && ctx.Doer != nil {
+		// the returned posters slice only contains a limited number of users,
+		// so to let the current user (doer) quickly filter their own issues, always add the doer to the posters slice
+ if !slices.ContainsFunc(posters, func(user *user_model.User) bool { return user.ID == ctx.Doer.ID }) {
+ posters = append(posters, ctx.Doer)
+ }
+ }
+
+ posters = MakeSelfOnTop(ctx.Doer, posters)
+
+ resp := &userSearchResponse{}
+ resp.Results = make([]*userSearchInfo, len(posters))
+ for i, user := range posters {
+ resp.Results[i] = &userSearchInfo{UserID: user.ID, UserName: user.Name, AvatarLink: user.AvatarLink(ctx)}
+ if setting.UI.DefaultShowFullName {
+ resp.Results[i].FullName = user.FullName
+ }
+ }
+ ctx.JSON(http.StatusOK, resp)
+}
diff --git a/routers/web/repo/issue_content_history.go b/routers/web/repo/issue_content_history.go
new file mode 100644
index 0000000..16b250a
--- /dev/null
+++ b/routers/web/repo/issue_content_history.go
@@ -0,0 +1,237 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "html"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/avatars"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+)
+
+// GetContentHistoryOverview gets the overview of an issue's content history
+func GetContentHistoryOverview(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ editedHistoryCountMap, _ := issues_model.QueryIssueContentHistoryEditedCountMap(ctx, issue.ID)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "i18n": map[string]any{
+ "textEdited": ctx.Tr("repo.issues.content_history.edited"),
+ "textDeleteFromHistory": ctx.Tr("repo.issues.content_history.delete_from_history"),
+ "textDeleteFromHistoryConfirm": ctx.Tr("repo.issues.content_history.delete_from_history_confirm"),
+ "textOptions": ctx.Tr("repo.issues.content_history.options"),
+ },
+ "editedHistoryCountMap": editedHistoryCountMap,
+ })
+}
+
+// GetContentHistoryList gets the list of content history revisions
+func GetContentHistoryList(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ commentID := ctx.FormInt64("comment_id")
+ items, _ := issues_model.FetchIssueContentHistoryList(ctx, issue.ID, commentID)
+
+ // render history list to HTML for frontend dropdown items: (name, value)
+ // name is HTML of "avatar + userName + userAction + timeSince"
+ // value is historyId
+ var results []map[string]any
+ for _, item := range items {
+ var actionText string
+ if item.IsDeleted {
+ actionTextDeleted := ctx.Locale.TrString("repo.issues.content_history.deleted")
+ actionText = "<i data-history-is-deleted='1'>" + actionTextDeleted + "</i>"
+ } else if item.IsFirstCreated {
+ actionText = ctx.Locale.TrString("repo.issues.content_history.created")
+ } else {
+ actionText = ctx.Locale.TrString("repo.issues.content_history.edited")
+ }
+
+ username := item.UserName
+ if setting.UI.DefaultShowFullName && strings.TrimSpace(item.UserFullName) != "" {
+ username = strings.TrimSpace(item.UserFullName)
+ }
+
+ src := html.EscapeString(item.UserAvatarLink)
+ class := avatars.DefaultAvatarClass + " tw-mr-2"
+ name := html.EscapeString(username)
+ avatarHTML := string(templates.AvatarHTML(src, 28, class, username))
+ timeSinceText := string(timeutil.TimeSinceUnix(item.EditedUnix, ctx.Locale))
+
+ results = append(results, map[string]any{
+ "name": avatarHTML + "<strong>" + name + "</strong> " + actionText + " " + timeSinceText,
+ "value": item.HistoryID,
+ })
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "results": results,
+ })
+}
+
+// canSoftDeleteContentHistory checks whether the current user can soft-delete a history revision.
+// Admins or owners can always delete history revisions; normal users can only delete their own.
+func canSoftDeleteContentHistory(ctx *context.Context, issue *issues_model.Issue, comment *issues_model.Comment,
+ history *issues_model.ContentHistory,
+) (canSoftDelete bool) {
+ // CanWrite means the doer can manage the issue/PR list
+ if ctx.Repo.IsOwner() || ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ canSoftDelete = true
+ } else if ctx.Doer == nil {
+ canSoftDelete = false
+ } else {
+		// read-only users can still post issues or comments,
+		// so they should be able to delete the history related to their own issue/comment. An example case:
+		// 1. the user posts some sensitive data
+		// 2. the repo owner edits the post but doesn't remove the sensitive data
+		// 3. the poster wants to delete the edited history revision
+ if comment == nil {
+ // the issue poster or the history poster can soft-delete
+ canSoftDelete = ctx.Doer.ID == issue.PosterID || ctx.Doer.ID == history.PosterID
+ canSoftDelete = canSoftDelete && (history.IssueID == issue.ID)
+ } else {
+ // the comment poster or the history poster can soft-delete
+ canSoftDelete = ctx.Doer.ID == comment.PosterID || ctx.Doer.ID == history.PosterID
+ canSoftDelete = canSoftDelete && (history.IssueID == issue.ID)
+ canSoftDelete = canSoftDelete && (history.CommentID == comment.ID)
+ }
+ }
+ return canSoftDelete
+}
+
+// GetContentHistoryDetail gets the detail of a content history revision
+func GetContentHistoryDetail(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ historyID := ctx.FormInt64("history_id")
+ history, prevHistory, err := issues_model.GetIssueContentHistoryAndPrev(ctx, issue.ID, historyID)
+ if err != nil {
+ ctx.JSON(http.StatusNotFound, map[string]any{
+ "message": "Can not find the content history",
+ })
+ return
+ }
+
+ // get the related comment if this history revision is for a comment, otherwise the history revision is for an issue.
+ var comment *issues_model.Comment
+ if history.CommentID != 0 {
+ var err error
+ if comment, err = issues_model.GetCommentByID(ctx, history.CommentID); err != nil {
+ log.Error("can not get comment for issue content history %v. err=%v", historyID, err)
+ return
+ }
+ }
+
+	// get the previous history revision (if it exists)
+ var prevHistoryID int64
+ var prevHistoryContentText string
+ if prevHistory != nil {
+ prevHistoryID = prevHistory.ID
+ prevHistoryContentText = prevHistory.ContentText
+ }
+
+ // compare the current history revision with the previous one
+ dmp := diffmatchpatch.New()
+	// `checklines=false` produces a better diff result
+ diff := dmp.DiffMain(prevHistoryContentText, history.ContentText, false)
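+	// the cleanup passes merge fragmented edits into larger, more readable chunks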
+ diff = dmp.DiffCleanupSemantic(diff)
+ diff = dmp.DiffCleanupEfficiency(diff)
+
+	// render the diff as HTML, reusing chroma's highlight classes (gi: insert, gd: delete)
+ diffHTMLBuf := bytes.Buffer{}
+ diffHTMLBuf.WriteString("<pre class='chroma'>")
+ for _, it := range diff {
+ if it.Type == diffmatchpatch.DiffInsert {
+ diffHTMLBuf.WriteString("<span class='gi'>")
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ diffHTMLBuf.WriteString("</span>")
+ } else if it.Type == diffmatchpatch.DiffDelete {
+ diffHTMLBuf.WriteString("<span class='gd'>")
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ diffHTMLBuf.WriteString("</span>")
+ } else {
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ }
+ }
+ diffHTMLBuf.WriteString("</pre>")
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "canSoftDelete": canSoftDeleteContentHistory(ctx, issue, comment, history),
+ "historyId": historyID,
+ "prevHistoryId": prevHistoryID,
+ "diffHtml": diffHTMLBuf.String(),
+ })
+}
+
+// SoftDeleteContentHistory soft-deletes a content history revision
+func SoftDeleteContentHistory(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ commentID := ctx.FormInt64("comment_id")
+ historyID := ctx.FormInt64("history_id")
+
+ var comment *issues_model.Comment
+ var history *issues_model.ContentHistory
+ var err error
+
+ if history, err = issues_model.GetIssueContentHistoryByID(ctx, historyID); err != nil {
+ log.Error("can not get issue content history %v. err=%v", historyID, err)
+ return
+ }
+ if history.IssueID != issue.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+ if commentID != 0 {
+ if history.CommentID != commentID {
+ ctx.NotFound("CompareCommentID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if comment, err = issues_model.GetCommentByID(ctx, commentID); err != nil {
+ log.Error("can not get comment for issue content history %v. err=%v", historyID, err)
+ return
+ }
+ if comment.IssueID != issue.ID {
+ ctx.NotFound("CompareIssueID", issues_model.ErrCommentNotExist{})
+ return
+ }
+ }
+
+ canSoftDelete := canSoftDeleteContentHistory(ctx, issue, comment, history)
+ if !canSoftDelete {
+ ctx.JSON(http.StatusForbidden, map[string]any{
+ "message": "Can not delete the content history",
+ })
+ return
+ }
+
+ err = issues_model.SoftDeleteIssueContentHistory(ctx, historyID)
+ log.Debug("soft delete issue content history. issue=%d, comment=%d, history=%d", issue.ID, commentID, historyID)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": err == nil,
+ })
+}
diff --git a/routers/web/repo/issue_dependency.go b/routers/web/repo/issue_dependency.go
new file mode 100644
index 0000000..66b3868
--- /dev/null
+++ b/routers/web/repo/issue_dependency.go
@@ -0,0 +1,144 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// AddDependency adds new dependencies
+func AddDependency(ctx *context.Context) {
+ issueIndex := ctx.ParamsInt64("index")
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, issueIndex)
+ if err != nil {
+ ctx.ServerError("GetIssueByIndex", err)
+ return
+ }
+
+ // Check if the Repo is allowed to have dependencies
+ if !ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies")
+ return
+ }
+
+ depID := ctx.FormInt64("newDependency")
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return
+ }
+
+ // Redirect
+ defer ctx.Redirect(issue.Link())
+
+ // Dependency
+ dep, err := issues_model.GetIssueByID(ctx, depID)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_issue_not_exist"))
+ return
+ }
+
+	// If cross-repository dependencies are not enabled, check that both issues are in the same repo
+ if issue.RepoID != dep.RepoID {
+ if !setting.Service.AllowCrossRepositoryDependencies {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_not_same_repo"))
+ return
+ }
+ if err := dep.LoadRepo(ctx); err != nil {
+ ctx.ServerError("loadRepo", err)
+ return
+ }
+ // Can ctx.Doer read issues in the dep repo?
+ depRepoPerm, err := access_model.GetUserRepoPermission(ctx, dep.Repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if !depRepoPerm.CanReadIssuesOrPulls(dep.IsPull) {
+ // you can't see this dependency
+ return
+ }
+ }
+
+	// Check if the issue and the dependency are the same
+ if dep.ID == issue.ID {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_same_issue"))
+ return
+ }
+
+ err = issues_model.CreateIssueDependency(ctx, ctx.Doer, issue, dep)
+ if err != nil {
+ if issues_model.IsErrDependencyExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_exists"))
+ return
+ } else if issues_model.IsErrCircularDependency(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_cannot_create_circular"))
+ return
+ }
+		ctx.ServerError("CreateIssueDependency", err)
+ return
+ }
+}
+
+// RemoveDependency removes the dependency
+func RemoveDependency(ctx *context.Context) {
+ issueIndex := ctx.ParamsInt64("index")
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, issueIndex)
+ if err != nil {
+ ctx.ServerError("GetIssueByIndex", err)
+ return
+ }
+
+ // Check if the Repo is allowed to have dependencies
+ if !ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies")
+ return
+ }
+
+ depID := ctx.FormInt64("removeDependencyID")
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return
+ }
+
+ // Dependency Type
+ depTypeStr := ctx.Req.PostFormValue("dependencyType")
+
+ var depType issues_model.DependencyType
+
+ switch depTypeStr {
+ case "blockedBy":
+ depType = issues_model.DependencyTypeBlockedBy
+ case "blocking":
+ depType = issues_model.DependencyTypeBlocking
+ default:
+		ctx.Error(http.StatusBadRequest, "GetDependencyType")
+ return
+ }
+
+ // Dependency
+ dep, err := issues_model.GetIssueByID(ctx, depID)
+ if err != nil {
+ ctx.ServerError("GetIssueByID", err)
+ return
+ }
+
+ if err = issues_model.RemoveIssueDependency(ctx, ctx.Doer, issue, dep, depType); err != nil {
+ if issues_model.IsErrDependencyNotExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_not_exist"))
+ return
+ }
+ ctx.ServerError("RemoveIssueDependency", err)
+ return
+ }
+
+ // Redirect
+ ctx.Redirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_label.go b/routers/web/repo/issue_label.go
new file mode 100644
index 0000000..81bee4d
--- /dev/null
+++ b/routers/web/repo/issue_label.go
@@ -0,0 +1,229 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ issue_service "code.gitea.io/gitea/services/issue"
+)
+
+const (
+ tplLabels base.TplName = "repo/issue/labels"
+)
+
+// Labels render issue's labels page
+func Labels(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsLabels"] = true
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.HTML(http.StatusOK, tplLabels)
+}
+
+// InitializeLabels initializes labels for a repository
+func InitializeLabels(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.InitializeLabelsForm)
+ if ctx.HasError() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+
+ if err := repo_module.InitializeLabels(ctx, ctx.Repo.Repository.ID, form.TemplateName, false); err != nil {
+ if label.IsErrTemplateLoad(err) {
+ originalErr := err.(label.ErrTemplateLoad).OriginalError
+ ctx.Flash.Error(ctx.Tr("repo.issues.label_templates.fail_to_load_file", form.TemplateName, originalErr))
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+ ctx.ServerError("InitializeLabels", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// RetrieveLabels finds all the labels of a repository and its organization
+func RetrieveLabels(ctx *context.Context) {
+ labels, err := issues_model.GetLabelsByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("RetrieveLabels.GetLabels", err)
+ return
+ }
+
+ for _, l := range labels {
+ l.CalOpenIssues()
+ }
+
+ ctx.Data["Labels"] = labels
+
+ if ctx.Repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+ for _, l := range orgLabels {
+ l.CalOpenOrgIssues(ctx, ctx.Repo.Repository.ID, l.ID)
+ }
+ ctx.Data["OrgLabels"] = orgLabels
+
+ org, err := organization.GetOrgByName(ctx, ctx.Repo.Owner.LowerName)
+ if err != nil {
+ ctx.ServerError("GetOrgByName", err)
+ return
+ }
+ if ctx.Doer != nil {
+ ctx.Org.IsOwner, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("org.IsOwnedBy", err)
+ return
+ }
+ ctx.Org.OrgLink = org.AsUser().OrganisationLink()
+ ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner
+ ctx.Data["OrganizationLink"] = ctx.Org.OrgLink
+ }
+ }
+ ctx.Data["NumLabels"] = len(labels)
+ ctx.Data["SortType"] = ctx.FormString("sort")
+}
+
+// NewLabel creates a new label for the repository
+func NewLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsLabels"] = true
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+
+ l := &issues_model.Label{
+ RepoID: ctx.Repo.Repository.ID,
+ Name: form.Title,
+ Exclusive: form.Exclusive,
+ Description: form.Description,
+ Color: form.Color,
+ }
+ if err := issues_model.NewLabel(ctx, l); err != nil {
+ ctx.ServerError("NewLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// UpdateLabel updates a label's name and color
+func UpdateLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ l, err := issues_model.GetLabelInRepoByID(ctx, ctx.Repo.Repository.ID, form.ID)
+ if err != nil {
+ switch {
+ case issues_model.IsErrRepoLabelNotExist(err):
+ ctx.Error(http.StatusNotFound)
+ default:
+ ctx.ServerError("UpdateLabel", err)
+ }
+ return
+ }
+ l.Name = form.Title
+ l.Exclusive = form.Exclusive
+ l.Description = form.Description
+ l.Color = form.Color
+
+ l.SetArchived(form.IsArchived)
+ if err := issues_model.UpdateLabel(ctx, l); err != nil {
+ ctx.ServerError("UpdateLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// DeleteLabel deletes a label
+func DeleteLabel(ctx *context.Context) {
+ if err := issues_model.DeleteLabel(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteLabel: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.issues.label_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// UpdateIssueLabel changes an issue's labels
+func UpdateIssueLabel(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ switch action := ctx.FormString("action"); action {
+ case "clear":
+ for _, issue := range issues {
+ if err := issue_service.ClearLabels(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("ClearLabels", err)
+ return
+ }
+ }
+ case "attach", "detach", "toggle", "toggle-alt":
+ label, err := issues_model.GetLabelByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+ if issues_model.IsErrRepoLabelNotExist(err) {
+ ctx.Error(http.StatusNotFound, "GetLabelByID")
+ } else {
+ ctx.ServerError("GetLabelByID", err)
+ }
+ return
+ }
+
+ if action == "toggle" {
+			// detach if any issue already has the label, otherwise attach
+ action = "attach"
+ if label.ExclusiveScope() == "" {
+ for _, issue := range issues {
+ if issues_model.HasIssueLabel(ctx, issue.ID, label.ID) {
+ action = "detach"
+ break
+ }
+ }
+ }
+ } else if action == "toggle-alt" {
+			// always detach when the alt key is pressed, to be able to remove
+			// scoped labels
+ action = "detach"
+ }
+
+ if action == "attach" {
+ for _, issue := range issues {
+ if err = issue_service.AddLabel(ctx, issue, ctx.Doer, label); err != nil {
+ ctx.ServerError("AddLabel", err)
+ return
+ }
+ }
+ } else {
+ for _, issue := range issues {
+ if err = issue_service.RemoveLabel(ctx, issue, ctx.Doer, label); err != nil {
+ ctx.ServerError("RemoveLabel", err)
+ return
+ }
+ }
+ }
+ default:
+ log.Warn("Unrecognized action: %s", action)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/repo/issue_label_test.go b/routers/web/repo/issue_label_test.go
new file mode 100644
index 0000000..2b4915e
--- /dev/null
+++ b/routers/web/repo/issue_label_test.go
@@ -0,0 +1,173 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strconv"
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func int64SliceToCommaSeparated(a []int64) string {
+ s := ""
+ for i, n := range a {
+ if i > 0 {
+ s += ","
+ }
+ s += strconv.Itoa(int(n))
+ }
+ return s
+}
+
+func TestInitializeLabels(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ require.NoError(t, repository.LoadRepoConfig())
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/initialize")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+ web.SetForm(ctx, &forms.InitializeLabelsForm{TemplateName: "Default"})
+ InitializeLabels(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ RepoID: 2,
+ Name: "enhancement",
+ Color: "#84b6eb",
+ })
+ assert.Equal(t, "/user2/repo2/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestRetrieveLabels(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ for _, testCase := range []struct {
+ RepoID int64
+ Sort string
+ ExpectedLabelIDs []int64
+ }{
+ {1, "", []int64{1, 2}},
+ {1, "leastissues", []int64{2, 1}},
+ {2, "", []int64{}},
+ } {
+ ctx, _ := contexttest.MockContext(t, "user/repo/issues")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, testCase.RepoID)
+ ctx.Req.Form.Set("sort", testCase.Sort)
+ RetrieveLabels(ctx)
+ assert.False(t, ctx.Written())
+ labels, ok := ctx.Data["Labels"].([]*issues_model.Label)
+ assert.True(t, ok)
+ if assert.Len(t, labels, len(testCase.ExpectedLabelIDs)) {
+ for i, label := range labels {
+ assert.EqualValues(t, testCase.ExpectedLabelIDs[i], label.ID)
+ }
+ }
+ }
+}
+
+func TestNewLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/edit")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.CreateLabelForm{
+ Title: "newlabel",
+ Color: "#abcdef",
+ })
+ NewLabel(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ Name: "newlabel",
+ Color: "#abcdef",
+ })
+ assert.Equal(t, "/user2/repo1/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestUpdateLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/edit")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.CreateLabelForm{
+ ID: 2,
+ Title: "newnameforlabel",
+ Color: "#abcdef",
+ IsArchived: true,
+ })
+ UpdateLabel(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ ID: 2,
+ Name: "newnameforlabel",
+ Color: "#abcdef",
+ })
+ assert.Equal(t, "/user2/repo1/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestDeleteLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/delete")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("id", "2")
+ DeleteLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ unittest.AssertNotExistsBean(t, &issues_model.Label{ID: 2})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{LabelID: 2})
+ assert.EqualValues(t, ctx.Tr("repo.issues.label_deletion_success"), ctx.Flash.SuccessMsg)
+}
+
+func TestUpdateIssueLabel_Clear(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("issue_ids", "1,3")
+ ctx.Req.Form.Set("action", "clear")
+ UpdateIssueLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 1})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 3})
+ unittest.CheckConsistencyFor(t, &issues_model.Label{})
+}
+
+func TestUpdateIssueLabel_Toggle(t *testing.T) {
+ for _, testCase := range []struct {
+ Action string
+ IssueIDs []int64
+ LabelID int64
+ ExpectedAdd bool // whether we expect the label to be added to the issues
+ }{
+ {"attach", []int64{1, 3}, 1, true},
+ {"detach", []int64{1, 3}, 1, false},
+ {"toggle", []int64{1, 3}, 1, false},
+ {"toggle", []int64{1, 2}, 2, true},
+ } {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("issue_ids", int64SliceToCommaSeparated(testCase.IssueIDs))
+ ctx.Req.Form.Set("action", testCase.Action)
+ ctx.Req.Form.Set("id", strconv.Itoa(int(testCase.LabelID)))
+ UpdateIssueLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ for _, issueID := range testCase.IssueIDs {
+ unittest.AssertExistsIf(t, testCase.ExpectedAdd, &issues_model.IssueLabel{
+ IssueID: issueID,
+ LabelID: testCase.LabelID,
+ })
+ }
+ unittest.CheckConsistencyFor(t, &issues_model.Label{})
+ }
+}
diff --git a/routers/web/repo/issue_lock.go b/routers/web/repo/issue_lock.go
new file mode 100644
index 0000000..1d5fc8a
--- /dev/null
+++ b/routers/web/repo/issue_lock.go
@@ -0,0 +1,65 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// LockIssue locks an issue. This would limit commenting abilities to
+// users with write access to the repo.
+func LockIssue(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.IssueLockForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if issue.IsLocked {
+ ctx.JSONError(ctx.Tr("repo.issues.lock_duplicate"))
+ return
+ }
+
+ if !form.HasValidReason() {
+ ctx.JSONError(ctx.Tr("repo.issues.lock.unknown_reason"))
+ return
+ }
+
+ if err := issues_model.LockIssue(ctx, &issues_model.IssueLockOptions{
+ Doer: ctx.Doer,
+ Issue: issue,
+ Reason: form.Reason,
+ }); err != nil {
+ ctx.ServerError("LockIssue", err)
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// UnlockIssue unlocks a previously locked issue.
+func UnlockIssue(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !issue.IsLocked {
+ ctx.JSONError(ctx.Tr("repo.issues.unlock_error"))
+ return
+ }
+
+ if err := issues_model.UnlockIssue(ctx, &issues_model.IssueLockOptions{
+ Doer: ctx.Doer,
+ Issue: issue,
+ }); err != nil {
+ ctx.ServerError("UnlockIssue", err)
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_pin.go b/routers/web/repo/issue_pin.go
new file mode 100644
index 0000000..365c812
--- /dev/null
+++ b/routers/web/repo/issue_pin.go
@@ -0,0 +1,107 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+// IssuePinOrUnpin pins or unpins an Issue
+func IssuePinOrUnpin(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ // If we don't do this, it will crash when trying to add the pin event to the comment history
+ err := issue.LoadRepo(ctx)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ err = issue.PinOrUnpin(ctx, ctx.Doer)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// IssueUnpin unpins an Issue
+func IssueUnpin(ctx *context.Context) {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ // If we don't do this, it will crash when trying to add the pin event to the comment history
+ err = issue.LoadRepo(ctx)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ err = issue.Unpin(ctx, ctx.Doer)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
+
+// IssuePinMove moves a pinned Issue
+func IssuePinMove(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, "Only signed in users are allowed to perform this action.")
+ return
+ }
+
+ type movePinIssueForm struct {
+ ID int64 `json:"id"`
+ Position int `json:"position"`
+ }
+
+ form := &movePinIssueForm{}
+ if err := json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ issue, err := issues_model.GetIssueByID(ctx, form.ID)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ if issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.Status(http.StatusNotFound)
+ log.Error("Issue does not belong to this repository")
+ return
+ }
+
+ err = issue.MovePin(ctx, form.Position)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
diff --git a/routers/web/repo/issue_stopwatch.go b/routers/web/repo/issue_stopwatch.go
new file mode 100644
index 0000000..70d42b2
--- /dev/null
+++ b/routers/web/repo/issue_stopwatch.go
@@ -0,0 +1,113 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/eventsource"
+ "code.gitea.io/gitea/services/context"
+)
+
+// IssueStopwatch creates or stops a stopwatch for the given issue.
+func IssueStopwatch(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+
+ var showSuccessMessage bool
+
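+	// only show the flash message when this request starts a new stopwatch,
+	// not when it stops an existing one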
+ if !issues_model.StopwatchExists(c, c.Doer.ID, issue.ID) {
+ showSuccessMessage = true
+ }
+
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ if err := issues_model.CreateOrStopIssueStopwatch(c, c.Doer, issue); err != nil {
+ c.ServerError("CreateOrStopIssueStopwatch", err)
+ return
+ }
+
+ if showSuccessMessage {
+ c.Flash.Success(c.Tr("repo.issues.tracker_auto_close"))
+ }
+
+ url := issue.Link()
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// CancelStopwatch cancels the stopwatch
+func CancelStopwatch(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ if err := issues_model.CancelStopwatch(c, c.Doer, issue); err != nil {
+ c.ServerError("CancelStopwatch", err)
+ return
+ }
+
+ stopwatches, err := issues_model.GetUserStopwatches(c, c.Doer.ID, db.ListOptions{})
+ if err != nil {
+ c.ServerError("GetUserStopwatches", err)
+ return
+ }
+ if len(stopwatches) == 0 {
+ eventsource.GetManager().SendMessage(c.Doer.ID, &eventsource.Event{
+ Name: "stopwatches",
+ Data: "{}",
+ })
+ }
+
+ url := issue.Link()
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// GetActiveStopwatch is the middleware that sets .ActiveStopwatch on context
+func GetActiveStopwatch(ctx *context.Context) {
+ if strings.HasPrefix(ctx.Req.URL.Path, "/api") {
+ return
+ }
+
+ if !ctx.IsSigned {
+ return
+ }
+
+ _, sw, issue, err := issues_model.HasUserStopwatch(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("HasUserStopwatch", err)
+ return
+ }
+
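+	// nothing to show if the user has no running stopwatch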
+ if sw == nil || sw.ID == 0 {
+ return
+ }
+
+ ctx.Data["ActiveStopwatch"] = StopwatchTmplInfo{
+ issue.Link(),
+ issue.Repo.FullName(),
+ issue.Index,
+		sw.Seconds() + 1, // ensure time is never zero in the UI
+ }
+}
+
+// StopwatchTmplInfo is a view on a stopwatch specifically for template rendering
+type StopwatchTmplInfo struct {
+ IssueLink string
+ RepoSlug string
+ IssueIndex int64
+ Seconds int64
+}
diff --git a/routers/web/repo/issue_test.go b/routers/web/repo/issue_test.go
new file mode 100644
index 0000000..f1d0aac
--- /dev/null
+++ b/routers/web/repo/issue_test.go
@@ -0,0 +1,375 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCombineLabelComments(t *testing.T) {
+ kases := []struct {
+ name string
+ beforeCombined []*issues_model.Comment
+ afterCombined []*issues_model.Comment
+ }{
+ {
+ name: "kase 1",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{},
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 2",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 70,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ CreatedUnix: 70,
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 3",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 2,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 2,
+ Content: "",
+ CreatedUnix: 0,
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 4",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/backport",
+ },
+ CreatedUnix: 10,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 10,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ {
+ Name: "kind/backport",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ },
+ },
+ {
+ name: "kase 5",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 2,
+ Content: "testtest",
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 2,
+ Content: "testtest",
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 6",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "reviewed/confirmed",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/feature",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "reviewed/confirmed",
+ },
+ {
+ Name: "kind/feature",
+ },
+ },
+ CreatedUnix: 0,
+ },
+ },
+ },
+ }
+
+ for _, kase := range kases {
+ t.Run(kase.name, func(t *testing.T) {
+ issue := issues_model.Issue{
+ Comments: kase.beforeCombined,
+ }
+ combineLabelComments(&issue)
+ assert.EqualValues(t, kase.afterCombined, issue.Comments)
+ })
+ }
+}
diff --git a/routers/web/repo/issue_timetrack.go b/routers/web/repo/issue_timetrack.go
new file mode 100644
index 0000000..241e434
--- /dev/null
+++ b/routers/web/repo/issue_timetrack.go
@@ -0,0 +1,87 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// AddTimeManually tracks time manually
+func AddTimeManually(c *context.Context) {
+ form := web.GetForm(c).(*forms.AddTimeManuallyForm)
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+ url := issue.Link()
+
+ if c.HasError() {
+ c.Flash.Error(c.GetErrMsg())
+ c.Redirect(url)
+ return
+ }
+
+ total := time.Duration(form.Hours)*time.Hour + time.Duration(form.Minutes)*time.Minute
+
+ if total <= 0 {
+ c.Flash.Error(c.Tr("repo.issues.add_time_sum_to_small"))
+ c.Redirect(url, http.StatusSeeOther)
+ return
+ }
+
+ if _, err := issues_model.AddTime(c, c.Doer, issue, int64(total.Seconds()), time.Now()); err != nil {
+ c.ServerError("AddTime", err)
+ return
+ }
+
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// DeleteTime deletes tracked time
+func DeleteTime(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ t, err := issues_model.GetTrackedTimeByID(c, c.ParamsInt64(":timeid"))
+ if err != nil {
+ if db.IsErrNotExist(err) {
+ c.NotFound("time not found", err)
+ return
+ }
+ c.Error(http.StatusInternalServerError, "GetTrackedTimeByID", err.Error())
+ return
+ }
+
+ // only OP or admin may delete
+ if !c.IsSigned || (!c.IsUserSiteAdmin() && c.Doer.ID != t.UserID) {
+ c.Error(http.StatusForbidden, "not allowed")
+ return
+ }
+
+ if err = issues_model.DeleteTime(c, t); err != nil {
+ c.ServerError("DeleteTime", err)
+ return
+ }
+
+ c.Flash.Success(c.Tr("repo.issues.del_time_history", util.SecToTime(t.Time)))
+ c.Redirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_watch.go b/routers/web/repo/issue_watch.go
new file mode 100644
index 0000000..5cff9f4
--- /dev/null
+++ b/routers/web/repo/issue_watch.go
@@ -0,0 +1,63 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strconv"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplWatching base.TplName = "repo/issue/view_content/sidebar/watching"
+)
+
+// IssueWatch sets issue watching
+func IssueWatch(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ watch, err := strconv.ParseBool(ctx.Req.PostFormValue("watch"))
+ if err != nil {
+ ctx.ServerError("watch is not bool", err)
+ return
+ }
+
+ if err := issues_model.CreateOrUpdateIssueWatch(ctx, ctx.Doer.ID, issue.ID, watch); err != nil {
+ ctx.ServerError("CreateOrUpdateIssueWatch", err)
+ return
+ }
+
+ ctx.Data["Issue"] = issue
+ ctx.Data["IssueWatch"] = &issues_model.IssueWatch{IsWatching: watch}
+ ctx.HTML(http.StatusOK, tplWatching)
+}
diff --git a/routers/web/repo/main_test.go b/routers/web/repo/main_test.go
new file mode 100644
index 0000000..6e469cf
--- /dev/null
+++ b/routers/web/repo/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go
new file mode 100644
index 0000000..ddda9f3
--- /dev/null
+++ b/routers/web/repo/middlewares.go
@@ -0,0 +1,120 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "strconv"
+
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/services/context"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+// SetEditorconfigIfExists sets the editor config as a render variable
+func SetEditorconfigIfExists(ctx *context.Context) {
+ if ctx.Repo.Repository.IsEmpty {
+ return
+ }
+
+ ec, _, err := ctx.Repo.GetEditorconfig()
+
+ if err != nil && !git.IsErrNotExist(err) {
+ description := fmt.Sprintf("Error while getting .editorconfig file: %v", err)
+ if err := system_model.CreateRepositoryNotice(description); err != nil {
+ ctx.ServerError("ErrCreatingReporitoryNotice", err)
+ }
+ return
+ }
+
+ ctx.Data["Editorconfig"] = ec
+}
+
+// SetDiffViewStyle sets the diff style as a render variable
+func SetDiffViewStyle(ctx *context.Context) {
+ queryStyle := ctx.FormString("style")
+
+ if !ctx.IsSigned {
+ ctx.Data["IsSplitStyle"] = queryStyle == "split"
+ return
+ }
+
+ var (
+ userStyle = ctx.Doer.DiffViewStyle
+ style string
+ )
+
+ if queryStyle == "unified" || queryStyle == "split" {
+ style = queryStyle
+ } else if userStyle == "unified" || userStyle == "split" {
+ style = userStyle
+ } else {
+ style = "unified"
+ }
+
+ ctx.Data["IsSplitStyle"] = style == "split"
+
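+ // Persist the effective diff style as the signed-in user's preference so it is remembered for future requests.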
+ opts := &user_service.UpdateOptions{
+ DiffViewStyle: optional.Some(style),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ }
+}
+
+// SetWhitespaceBehavior sets the whitespace behavior as a render variable
+func SetWhitespaceBehavior(ctx *context.Context) {
+ const defaultWhitespaceBehavior = "show-all"
+ whitespaceBehavior := ctx.FormString("whitespace")
+ switch whitespaceBehavior {
+ case "", "ignore-all", "ignore-eol", "ignore-change":
+ break
+ default:
+ whitespaceBehavior = defaultWhitespaceBehavior
+ }
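+ // For signed-in users, fall back to the stored preference when no value was submitted, and persist a newly submitted value.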
+ if ctx.IsSigned {
+ userWhitespaceBehavior, err := user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, defaultWhitespaceBehavior)
+ if err == nil {
+ if whitespaceBehavior == "" {
+ whitespaceBehavior = userWhitespaceBehavior
+ } else if whitespaceBehavior != userWhitespaceBehavior {
+ _ = user_model.SetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, whitespaceBehavior)
+ }
+ } // else: we can ignore the error safely
+ }
+
+ // these behaviors are for gitdiff.GetWhitespaceFlag
+ if whitespaceBehavior == "" {
+ ctx.Data["WhitespaceBehavior"] = defaultWhitespaceBehavior
+ } else {
+ ctx.Data["WhitespaceBehavior"] = whitespaceBehavior
+ }
+}
+
+// SetShowOutdatedComments sets the show-outdated-comments option as a context variable
+func SetShowOutdatedComments(ctx *context.Context) {
+ showOutdatedCommentsValue := ctx.FormString("show-outdated")
+
+ if showOutdatedCommentsValue != "true" && showOutdatedCommentsValue != "false" {
+ // invalid or no value for this form string -> use default or stored user setting
+ if ctx.IsSigned {
+ showOutdatedCommentsValue, _ = user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyShowOutdatedComments, "false")
+ } else {
+ // not logged in user -> use the default value
+ showOutdatedCommentsValue = "false"
+ }
+ } else {
+ // valid value -> update user setting if user is logged in
+ if ctx.IsSigned {
+ _ = user_model.SetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyShowOutdatedComments, showOutdatedCommentsValue)
+ }
+ }
+
+ showOutdatedComments, _ := strconv.ParseBool(showOutdatedCommentsValue)
+ ctx.Data["ShowOutdatedComments"] = showOutdatedComments
+}
diff --git a/routers/web/repo/migrate.go b/routers/web/repo/migrate.go
new file mode 100644
index 0000000..0acf966
--- /dev/null
+++ b/routers/web/repo/migrate.go
@@ -0,0 +1,310 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ admin_model "code.gitea.io/gitea/models/admin"
+ "code.gitea.io/gitea/models/db"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/migrations"
+ "code.gitea.io/gitea/services/task"
+)
+
+const (
+ tplMigrate base.TplName = "repo/migrate/migrate"
+)
+
+// Migrate renders the repository migration page
+func Migrate(ctx *context.Context) {
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "Migrate: the site administrator has disabled migrations")
+ return
+ }
+
+ serviceType := structs.GitServiceType(ctx.FormInt("service_type"))
+
+ setMigrationContextData(ctx, serviceType)
+
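+ // A zero service type means no migration source has been selected yet: render the generic page with the service selector.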
+ if serviceType == 0 {
+ ctx.Data["Org"] = ctx.FormString("org")
+ ctx.Data["Mirror"] = ctx.FormString("mirror")
+
+ ctx.HTML(http.StatusOK, tplMigrate)
+ return
+ }
+
+ ctx.Data["private"] = getRepoPrivate(ctx)
+ ctx.Data["mirror"] = ctx.FormString("mirror") == "1"
+ ctx.Data["lfs"] = ctx.FormString("lfs") == "1"
+ ctx.Data["wiki"] = ctx.FormString("wiki") == "1"
+ ctx.Data["milestones"] = ctx.FormString("milestones") == "1"
+ ctx.Data["labels"] = ctx.FormString("labels") == "1"
+ ctx.Data["issues"] = ctx.FormString("issues") == "1"
+ ctx.Data["pull_requests"] = ctx.FormString("pull_requests") == "1"
+ ctx.Data["releases"] = ctx.FormString("releases") == "1"
+
+ ctxUser := checkContextUser(ctx, ctx.FormInt64("org"))
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ ctx.HTML(http.StatusOK, base.TplName("repo/migrate/"+serviceType.Name()))
+}
+
+func handleMigrateError(ctx *context.Context, owner *user_model.User, err error, name string, tpl base.TplName, form *forms.MigrateRepoForm) {
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "MigrateError: the site administrator has disabled migrations")
+ return
+ }
+
+ switch {
+ case migrations.IsRateLimitError(err):
+ ctx.RenderWithErr(ctx.Tr("form.visit_rate_limit"), tpl, form)
+ case migrations.IsTwoFactorAuthError(err):
+ ctx.RenderWithErr(ctx.Tr("form.2fa_auth_required"), tpl, form)
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tpl, form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tpl, form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tpl, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tpl, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tpl, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tpl, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tpl, form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form)
+ default:
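+ // Strip credentials embedded in URLs from the error before matching and displaying it.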
+ err = util.SanitizeErrorCredentialURLs(err)
+ if strings.Contains(err.Error(), "Authentication failed") ||
+ strings.Contains(err.Error(), "Bad credentials") ||
+ strings.Contains(err.Error(), "could not read Username") {
+ ctx.Data["Err_Auth"] = true
+ ctx.RenderWithErr(ctx.Tr("form.auth_failed", err.Error()), tpl, form)
+ } else if strings.Contains(err.Error(), "fatal:") {
+ ctx.Data["Err_CloneAddr"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.failed", err.Error()), tpl, form)
+ } else {
+ ctx.ServerError(name, err)
+ }
+ }
+}
+
+func handleMigrateRemoteAddrError(ctx *context.Context, err error, tpl base.TplName, form *forms.MigrateRepoForm) {
+ if models.IsErrInvalidCloneAddr(err) {
+ addrErr := err.(*models.ErrInvalidCloneAddr)
+ switch {
+ case addrErr.IsProtocolInvalid:
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_protocol_invalid"), tpl, form)
+ case addrErr.IsURLError:
+ ctx.RenderWithErr(ctx.Tr("form.url_error", addrErr.Host), tpl, form)
+ case addrErr.IsPermissionDenied:
+ if addrErr.LocalPath {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied"), tpl, form)
+ } else {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied_blocked"), tpl, form)
+ }
+ case addrErr.IsInvalidPath:
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_local_path"), tpl, form)
+ default:
+ log.Error("Error whilst updating url: %v", err)
+ ctx.RenderWithErr(ctx.Tr("form.url_error", "unknown"), tpl, form)
+ }
+ } else {
+ log.Error("Error whilst updating url: %v", err)
+ ctx.RenderWithErr(ctx.Tr("form.url_error", "unknown"), tpl, form)
+ }
+}
+
+// MigratePost responds to a request to migrate a repository from an external git service
+func MigratePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.MigrateRepoForm)
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "MigratePost: the site administrator has disabled migrations")
+ return
+ }
+
+ if form.Mirror && setting.Mirror.DisableNewPull {
+ ctx.Error(http.StatusBadRequest, "MigratePost: the site administrator has disabled creation of new mirrors")
+ return
+ }
+
+ setMigrationContextData(ctx, form.Service)
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ tpl := base.TplName("repo/migrate/" + form.Service.Name())
+
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tpl)
+ return
+ }
+
+ remoteAddr, err := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, form.AuthPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_CloneAddr"] = true
+ handleMigrateRemoteAddrError(ctx, err, tpl, form)
+ return
+ }
+
+ form.LFS = form.LFS && setting.LFS.StartServer
+
+ if form.LFS && len(form.LFSEndpoint) > 0 {
+ ep := lfs.DetermineEndpoint("", form.LFSEndpoint)
+ if ep == nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tpl, &form)
+ return
+ }
+ err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer)
+ if err != nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ handleMigrateRemoteAddrError(ctx, err, tpl, form)
+ return
+ }
+ }
+
+ opts := migrations.MigrateOptions{
+ OriginalURL: form.CloneAddr,
+ GitServiceType: form.Service,
+ CloneAddr: remoteAddr,
+ RepoName: form.RepoName,
+ Description: form.Description,
+ Private: form.Private || setting.Repository.ForcePrivate,
+ Mirror: form.Mirror,
+ LFS: form.LFS,
+ LFSEndpoint: form.LFSEndpoint,
+ AuthUsername: form.AuthUsername,
+ AuthPassword: form.AuthPassword,
+ AuthToken: form.AuthToken,
+ Wiki: form.Wiki,
+ Issues: form.Issues,
+ Milestones: form.Milestones,
+ Labels: form.Labels,
+ Comments: form.Issues || form.PullRequests,
+ PullRequests: form.PullRequests,
+ Releases: form.Releases,
+ }
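+ // Pull mirrors only replicate git data, so migration of issues, milestones, labels, comments, pull requests and releases is disabled for them.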
+ if opts.Mirror {
+ opts.Issues = false
+ opts.Milestones = false
+ opts.Labels = false
+ opts.Comments = false
+ opts.PullRequests = false
+ opts.Releases = false
+ }
+
+ err = repo_model.CheckCreateRepository(ctx, ctx.Doer, ctxUser, opts.RepoName, false)
+ if err != nil {
+ handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form)
+ return
+ }
+
+ err = task.MigrateRepository(ctx, ctx.Doer, ctxUser, opts)
+ if err == nil {
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(opts.RepoName))
+ return
+ }
+
+ handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form)
+}
+
+func setMigrationContextData(ctx *context.Context, serviceType structs.GitServiceType) {
+ ctx.Data["Title"] = ctx.Tr("new_migrate.title")
+
+ ctx.Data["LFSActive"] = setting.LFS.StartServer
+ ctx.Data["IsForcedPrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+
+ // Plain git should be first
+ ctx.Data["Services"] = append([]structs.GitServiceType{structs.PlainGitService}, structs.SupportedFullGitService...)
+ ctx.Data["service"] = serviceType
+}
+
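+// MigrateRetryPost retries a failed repository migration, unless the owner's size quota for repositories is exceeded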
+func MigrateRetryPost(ctx *context.Context) {
+ ok, err := quota_model.EvaluateForUser(ctx, ctx.Repo.Repository.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ log.Error("quota_model.EvaluateForUser: %v", err)
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
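+ // Quota exceeded: store the reason on the migration task and answer with 413.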
+ if !ok {
+ if err := task.SetMigrateTaskMessage(ctx, ctx.Repo.Repository.ID, ctx.Locale.TrString("repo.settings.pull_mirror_sync_quota_exceeded")); err != nil {
+ log.Error("SetMigrateTaskMessage failed: %v", err)
+ ctx.ServerError("task.SetMigrateTaskMessage", err)
+ return
+ }
+ ctx.JSON(http.StatusRequestEntityTooLarge, map[string]any{
+ "ok": false,
+ "error": ctx.Tr("repo.settings.pull_mirror_sync_quota_exceeded"),
+ })
+ return
+ }
+
+ if err := task.RetryMigrateTask(ctx, ctx.Repo.Repository.ID); err != nil {
+ log.Error("Retry task failed: %v", err)
+ ctx.ServerError("task.RetryMigrateTask", err)
+ return
+ }
+ ctx.JSONOK()
+}
+
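+// MigrateCancelPost cancels a running migration task and redirects back to the repository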
+func MigrateCancelPost(ctx *context.Context) {
+ migratingTask, err := admin_model.GetMigratingTask(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ log.Error("GetMigratingTask: %v", err)
+ ctx.Redirect(ctx.Repo.Repository.Link())
+ return
+ }
+ if migratingTask.Status == structs.TaskStatusRunning {
+ taskUpdate := &admin_model.Task{ID: migratingTask.ID, Status: structs.TaskStatusFailed, Message: "canceled"}
+ if err = taskUpdate.UpdateCols(ctx, "status", "message"); err != nil {
+ ctx.ServerError("task.UpdateCols", err)
+ return
+ }
+ }
+ ctx.Redirect(ctx.Repo.Repository.Link())
+}
diff --git a/routers/web/repo/milestone.go b/routers/web/repo/milestone.go
new file mode 100644
index 0000000..1c53f73
--- /dev/null
+++ b/routers/web/repo/milestone.go
@@ -0,0 +1,304 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/issue"
+
+ "xorm.io/builder"
+)
+
+const (
+ tplMilestone base.TplName = "repo/issue/milestones"
+ tplMilestoneNew base.TplName = "repo/issue/milestone_new"
+ tplMilestoneIssues base.TplName = "repo/issue/milestone_issues"
+)
+
+// Milestones render milestones page
+func Milestones(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+
+ isShowClosed := ctx.FormString("state") == "closed"
+ sortType := ctx.FormString("sort")
+ keyword := ctx.FormTrim("q")
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ miles, total, err := db.FindAndCount[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoID: ctx.Repo.Repository.ID,
+ IsClosed: optional.Some(isShowClosed),
+ SortType: sortType,
+ Name: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+
+ stats, err := issues_model.GetMilestonesStatsByRepoCondAndKw(ctx, builder.And(builder.Eq{"id": ctx.Repo.Repository.ID}), keyword)
+ if err != nil {
+ ctx.ServerError("GetMilestoneStats", err)
+ return
+ }
+ ctx.Data["OpenCount"] = stats.OpenCount
+ ctx.Data["ClosedCount"] = stats.ClosedCount
+ linkStr := "%s/milestones?state=%s&q=%s&sort=%s"
+ ctx.Data["OpenLink"] = fmt.Sprintf(linkStr, ctx.Repo.RepoLink, "open",
+ url.QueryEscape(keyword), url.QueryEscape(sortType))
+ ctx.Data["ClosedLink"] = fmt.Sprintf(linkStr, ctx.Repo.RepoLink, "closed",
+ url.QueryEscape(keyword), url.QueryEscape(sortType))
+
+ if ctx.Repo.Repository.IsTimetrackerEnabled(ctx) {
+ if err := issues_model.MilestoneList(miles).LoadTotalTrackedTimes(ctx); err != nil {
+ ctx.ServerError("LoadTotalTrackedTimes", err)
+ return
+ }
+ }
+ for _, m := range miles {
+ m.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, m.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ }
+ ctx.Data["Milestones"] = miles
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ ctx.Data["SortType"] = sortType
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["IsShowClosed"] = isShowClosed
+
+ pager := context.NewPagination(int(total), setting.UI.IssuePagingNum, page, 5)
+ pager.AddParam(ctx, "state", "State")
+ pager.AddParam(ctx, "q", "Keyword")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplMilestone)
+}
+
+// NewMilestone renders the new milestone page
+func NewMilestone(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+}
+
+// NewMilestonePost responds to a request to create a milestone
+func NewMilestonePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateMilestoneForm)
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+ return
+ }
+
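+ // An empty deadline defaults to a far-future date; the parsed day is then pinned to 23:59:59 in server local time.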
+ if len(form.Deadline) == 0 {
+ form.Deadline = "9999-12-31"
+ }
+ deadline, err := time.ParseInLocation("2006-01-02", form.Deadline, time.Local)
+ if err != nil {
+ ctx.Data["Err_Deadline"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.milestones.invalid_due_date_format"), tplMilestoneNew, &form)
+ return
+ }
+
+ deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location())
+ if err = issues_model.NewMilestone(ctx, &issues_model.Milestone{
+ RepoID: ctx.Repo.Repository.ID,
+ Name: form.Title,
+ Content: form.Content,
+ DeadlineUnix: timeutil.TimeStamp(deadline.Unix()),
+ }); err != nil {
+ ctx.ServerError("NewMilestone", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.milestones.create_success", form.Title))
+ ctx.Redirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// EditMilestone renders the edit milestone page
+func EditMilestone(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.edit")
+ ctx.Data["PageIsMilestones"] = true
+ ctx.Data["PageIsEditMilestone"] = true
+
+ m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ }
+ return
+ }
+ ctx.Data["title"] = m.Name
+ ctx.Data["content"] = m.Content
+ if len(m.DeadlineString) > 0 {
+ ctx.Data["deadline"] = m.DeadlineString
+ }
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+}
+
+// EditMilestonePost responds to a request to edit a milestone
+func EditMilestonePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateMilestoneForm)
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.edit")
+ ctx.Data["PageIsMilestones"] = true
+ ctx.Data["PageIsEditMilestone"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+ return
+ }
+
+ if len(form.Deadline) == 0 {
+ form.Deadline = "9999-12-31"
+ }
+ deadline, err := time.ParseInLocation("2006-01-02", form.Deadline, time.Local)
+ if err != nil {
+ ctx.Data["Err_Deadline"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.milestones.invalid_due_date_format"), tplMilestoneNew, &form)
+ return
+ }
+
+ deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location())
+ m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ }
+ return
+ }
+ m.Name = form.Title
+ m.Content = form.Content
+ m.DeadlineUnix = timeutil.TimeStamp(deadline.Unix())
+ if err = issues_model.UpdateMilestone(ctx, m, m.IsClosed); err != nil {
+ ctx.ServerError("UpdateMilestone", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.milestones.edit_success", m.Name))
+ ctx.Redirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// ChangeMilestoneStatus responds to a request to change a milestone's status
+func ChangeMilestoneStatus(ctx *context.Context) {
+ var toClose bool
+ switch ctx.Params(":action") {
+ case "open":
+ toClose = false
+ case "close":
+ toClose = true
+ default:
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones")
+ return
+ }
+ id := ctx.ParamsInt64(":id")
+
+ if err := issues_model.ChangeMilestoneStatusByRepoIDAndID(ctx, ctx.Repo.Repository.ID, id, toClose); err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", err)
+ } else {
+ ctx.ServerError("ChangeMilestoneStatusByIDAndRepoID", err)
+ }
+ return
+ }
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones?state=" + url.QueryEscape(ctx.Params(":action")))
+}
+
+// DeleteMilestone deletes a milestone
+func DeleteMilestone(ctx *context.Context) {
+ if err := issues_model.DeleteMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteMilestoneByRepoID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.milestones.deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// MilestoneIssuesAndPulls lists all the issues and pull requests of the milestone
+func MilestoneIssuesAndPulls(ctx *context.Context) {
+ milestoneID := ctx.ParamsInt64(":id")
+ projectID := ctx.FormInt64("project")
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("GetMilestoneByID", err)
+ return
+ }
+
+ ctx.ServerError("GetMilestoneByID", err)
+ return
+ }
+
+ milestone.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, milestone.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.Data["Title"] = milestone.Name
+ ctx.Data["Milestone"] = milestone
+
+ issues(ctx, milestoneID, projectID, optional.None[bool]())
+
+ ret, _ := issue.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["NewIssueChooseTemplate"] = len(ret) > 0
+
+ ctx.Data["CanWriteIssues"] = ctx.Repo.CanWriteIssuesOrPulls(false)
+ ctx.Data["CanWritePulls"] = ctx.Repo.CanWriteIssuesOrPulls(true)
+
+ ctx.HTML(http.StatusOK, tplMilestoneIssues)
+}
diff --git a/routers/web/repo/packages.go b/routers/web/repo/packages.go
new file mode 100644
index 0000000..11874ab
--- /dev/null
+++ b/routers/web/repo/packages.go
@@ -0,0 +1,78 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplPackagesList base.TplName = "repo/packages"
+)
+
+// Packages displays a list of all packages in the repository
+func Packages(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ query := ctx.FormTrim("q")
+ packageType := ctx.FormTrim("type")
+
+ pvs, total, err := packages.SearchLatestVersions(ctx, &packages.PackageSearchOptions{
+ Paginator: &db.ListOptions{
+ PageSize: setting.UI.PackagesPagingNum,
+ Page: page,
+ },
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ Type: packages.Type(packageType),
+ Name: packages.SearchValue{Value: query},
+ IsInternal: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("SearchLatestVersions", err)
+ return
+ }
+
+ pds, err := packages.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptors", err)
+ return
+ }
+
+ hasPackages, err := packages.HasRepositoryPackages(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("HasRepositoryPackages", err)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["Query"] = query
+ ctx.Data["PackageType"] = packageType
+ ctx.Data["AvailableTypes"] = packages.TypeList
+ ctx.Data["HasPackages"] = hasPackages
+ if ctx.Repo != nil {
+ ctx.Data["CanWritePackages"] = ctx.IsUserRepoWriter([]unit.Type{unit.TypePackages}) || ctx.IsUserSiteAdmin()
+ }
+ ctx.Data["PackageDescriptors"] = pds
+ ctx.Data["Total"] = total
+ ctx.Data["RepositoryAccessMap"] = map[int64]bool{ctx.Repo.Repository.ID: true} // There is only the current repository
+
+ pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5)
+ pager.AddParam(ctx, "q", "Query")
+ pager.AddParam(ctx, "type", "PackageType")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplPackagesList)
+}
diff --git a/routers/web/repo/patch.go b/routers/web/repo/patch.go
new file mode 100644
index 0000000..d234f6c
--- /dev/null
+++ b/routers/web/repo/patch.go
@@ -0,0 +1,124 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/repository/files"
+)
+
+const (
+ tplPatchFile base.TplName = "repo/editor/patch"
+)
+
+// NewDiffPatch renders the create patch page
+func NewDiffPatch(ctx *context.Context) {
+ canCommit := renderCommitRights(ctx)
+
+ ctx.Data["PageIsPatch"] = true
+
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ ctx.HTML(http.StatusOK, tplPatchFile)
+}
+
+// NewDiffPatchPost responds to a submitted patch
+func NewDiffPatchPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+ ctx.Data["PageIsPatch"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["FileContent"] = form.Content
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplPatchFile)
+ return
+ }
+
+ // Cannot commit to an existing branch if the user doesn't have rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplEditFile, &form)
+ return
+ }
+
+ // CommitSummary is optional in the web form; if empty, fall back to the default patch commit message
+ // `message` will be both the summary and message combined
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ message = ctx.Locale.TrString("repo.editor.patch")
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplPatchFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ fileResponse, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, &files.ApplyDiffPatchOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Content: strings.ReplaceAll(form.Content, "\r", ""),
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ })
+ if err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplEditFile, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.editor.fail_to_apply_patch", err), tplPatchFile, &form)
+ return
+ }
+
+ if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/commit/" + fileResponse.Commit.SHA)
+ }
+}
diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go
new file mode 100644
index 0000000..878b7ee
--- /dev/null
+++ b/routers/web/repo/projects.go
@@ -0,0 +1,670 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/perm"
+ project_model "code.gitea.io/gitea/models/project"
+ attachment_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplProjects base.TplName = "repo/projects/list"
+ tplProjectsNew base.TplName = "repo/projects/new"
+ tplProjectsView base.TplName = "repo/projects/view"
+)
+
+// MustEnableProjects checks if projects are enabled in settings
+func MustEnableProjects(ctx *context.Context) {
+ if unit.TypeProjects.UnitGlobalDisabled() {
+ ctx.NotFound("EnableRepoProjects", nil)
+ return
+ }
+
+ if ctx.Repo.Repository != nil {
+ if !ctx.Repo.CanRead(unit.TypeProjects) {
+ ctx.NotFound("MustEnableProjects", nil)
+ return
+ }
+ }
+}
+
+// Projects renders the home page of projects
+func Projects(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects")
+
+ sortType := ctx.FormTrim("sort")
+
+ isShowClosed := strings.ToLower(ctx.FormTrim("state")) == "closed"
+ keyword := ctx.FormTrim("q")
+ repo := ctx.Repo.Repository
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ ctx.Data["OpenCount"] = repo.NumOpenProjects
+ ctx.Data["ClosedCount"] = repo.NumClosedProjects
+
+ var total int
+ if !isShowClosed {
+ total = repo.NumOpenProjects
+ } else {
+ total = repo.NumClosedProjects
+ }
+
+ projects, count, err := db.FindAndCount[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.IssuePagingNum,
+ Page: page,
+ },
+ RepoID: repo.ID,
+ IsClosed: optional.Some(isShowClosed),
+ OrderBy: project_model.GetSearchOrderByBySortType(sortType),
+ Type: project_model.TypeRepository,
+ Title: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ for i := range projects {
+ projects[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, projects[i].Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ }
+
+ ctx.Data["Projects"] = projects
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ numPages := 0
+ if count > 0 {
+ numPages = (int(count) - 1) / setting.UI.IssuePagingNum
+ }
+
+ pager := context.NewPagination(total, setting.UI.IssuePagingNum, page, numPages)
+ pager.AddParam(ctx, "state", "State")
+ ctx.Data["Page"] = pager
+
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["IsShowClosed"] = isShowClosed
+ ctx.Data["IsProjectsPage"] = true
+ ctx.Data["SortType"] = sortType
+
+ ctx.HTML(http.StatusOK, tplProjects)
+}
+
+// RenderNewProject render creating a project page
+func RenderNewProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+ ctx.Data["TemplateConfigs"] = project_model.GetTemplateConfigs()
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CancelLink"] = ctx.Repo.Repository.Link() + "/projects"
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// NewProjectPost creates a new project
+func NewProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+
+ if ctx.HasError() {
+ RenderNewProject(ctx)
+ return
+ }
+
+ if err := project_model.NewProject(ctx, &project_model.Project{
+ RepoID: ctx.Repo.Repository.ID,
+ Title: form.Title,
+ Description: form.Content,
+ CreatorID: ctx.Doer.ID,
+ TemplateType: form.TemplateType,
+ CardType: form.CardType,
+ Type: project_model.TypeRepository,
+ }); err != nil {
+ ctx.ServerError("NewProject", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.create_success", form.Title))
+ ctx.Redirect(ctx.Repo.RepoLink + "/projects")
+}
+
+// ChangeProjectStatus updates the status of a project between "open" and "close"
+func ChangeProjectStatus(ctx *context.Context) {
+ var toClose bool
+ switch ctx.Params(":action") {
+ case "open":
+ toClose = false
+ case "close":
+ toClose = true
+ default:
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects")
+ return
+ }
+ id := ctx.ParamsInt64(":id")
+
+ if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, ctx.Repo.Repository.ID, id, toClose); err != nil {
+ ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ ctx.JSONRedirect(fmt.Sprintf("%s/projects/%d", ctx.Repo.RepoLink, id))
+}
+
+// DeleteProject deletes a project
+func DeleteProject(ctx *context.Context) {
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ if err := project_model.DeleteProjectByID(ctx, p.ID); err != nil {
+ ctx.Flash.Error("DeleteProjectByID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.projects.deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects")
+}
+
+// RenderEditProject allows a project to be edited
+func RenderEditProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ctx.Data["projectID"] = p.ID
+ ctx.Data["title"] = p.Title
+ ctx.Data["content"] = p.Description
+ ctx.Data["card_type"] = p.CardType
+ ctx.Data["redirect"] = ctx.FormString("redirect")
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/projects/%d", ctx.Repo.Repository.Link(), p.ID)
+
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// EditProjectPost response for editing a project
+func EditProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ projectID := ctx.ParamsInt64(":id")
+
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/projects/%d", ctx.Repo.Repository.Link(), projectID)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+ return
+ }
+
+ p, err := project_model.GetProjectByID(ctx, projectID)
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ p.Title = form.Title
+ p.Description = form.Content
+ p.CardType = form.CardType
+ if err = project_model.UpdateProject(ctx, p); err != nil {
+ ctx.ServerError("UpdateProjects", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.edit_success", p.Title))
+ if ctx.FormString("redirect") == "project" {
+ ctx.Redirect(p.Link(ctx))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/projects")
+ }
+}
+
+// ViewProject renders the project with board view
+func ViewProject(ctx *context.Context) {
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ columns, err := project.GetColumns(ctx)
+ if err != nil {
+ ctx.ServerError("GetProjectColumns", err)
+ return
+ }
+
+ issuesMap, err := issues_model.LoadIssuesFromColumnList(ctx, columns)
+ if err != nil {
+ ctx.ServerError("LoadIssuesOfColumns", err)
+ return
+ }
+
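+ // For card types other than text-only, load the latest image attachments of each issue for the card previews.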
+ if project.CardType != project_model.CardTypeTextOnly {
+ issuesAttachmentMap := make(map[int64][]*attachment_model.Attachment)
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ if issueAttachment, err := attachment_model.GetAttachmentsByIssueIDImagesLatest(ctx, issue.ID); err == nil {
+ issuesAttachmentMap[issue.ID] = issueAttachment
+ }
+ }
+ }
+ ctx.Data["issuesAttachmentMap"] = issuesAttachmentMap
+ }
+
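+ // Collect pull requests that cross-reference each issue through comments, so they can be shown as linked PRs on the cards.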
+ linkedPrsMap := make(map[int64][]*issues_model.Issue)
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ var referencedIDs []int64
+ for _, comment := range issue.Comments {
+ if comment.RefIssueID != 0 && comment.RefIsPull {
+ referencedIDs = append(referencedIDs, comment.RefIssueID)
+ }
+ }
+
+ if len(referencedIDs) > 0 {
+ if linkedPrs, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ IssueIDs: referencedIDs,
+ IsPull: optional.Some(true),
+ }); err == nil {
+ linkedPrsMap[issue.ID] = linkedPrs
+ }
+ }
+ }
+ }
+ ctx.Data["LinkedPRs"] = linkedPrsMap
+
+ project.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, project.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.Data["IsProjectsPage"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["Project"] = project
+ ctx.Data["IssuesMap"] = issuesMap
+ ctx.Data["Columns"] = columns
+
+ ctx.HTML(http.StatusOK, tplProjectsView)
+}
+
+// UpdateIssueProject changes an issue's project
+func UpdateIssueProject(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := issues.LoadProjects(ctx); err != nil {
+ ctx.ServerError("LoadProjects", err)
+ return
+ }
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("LoadProjects", err)
+ return
+ }
+
+ projectID := ctx.FormInt64("id")
+ for _, issue := range issues {
+ if issue.Project != nil && issue.Project.ID == projectID {
+ continue
+ }
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectID, 0); err != nil {
+ if errors.Is(err, util.ErrPermissionDenied) {
+ continue
+ }
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+
+ ctx.JSONOK()
+}
+
+// DeleteProjectColumn allows for the deletion of a project column
+func DeleteProjectColumn(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+
+ pb, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return
+ }
+ if pb.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", pb.ID, project.ID),
+ })
+ return
+ }
+
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", pb.ID, ctx.Repo.Repository.ID),
+ })
+ return
+ }
+
+ if err := project_model.DeleteColumnByID(ctx, ctx.ParamsInt64(":columnID")); err != nil {
+ ctx.ServerError("DeleteProjectColumnByID", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// AddColumnToProjectPost allows a new column to be added to a project.
+func AddColumnToProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectForRepoByID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+
+ if err := project_model.NewColumn(ctx, &project_model.Column{
+ ProjectID: project.ID,
+ Title: form.Title,
+ Color: form.Color,
+ CreatorID: ctx.Doer.ID,
+ }); err != nil {
+ ctx.ServerError("NewProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+func checkProjectColumnChangePermissions(ctx *context.Context) (*project_model.Project, *project_model.Column) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return nil, nil
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return nil, nil
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return nil, nil
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return nil, nil
+ }
+ if column.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", column.ID, project.ID),
+ })
+ return nil, nil
+ }
+
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", column.ID, ctx.Repo.Repository.ID),
+ })
+ return nil, nil
+ }
+ return project, column
+}
+
+// EditProjectColumn allows a project column to be updated
+func EditProjectColumn(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+ _, column := checkProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if form.Title != "" {
+ column.Title = form.Title
+ }
+ column.Color = form.Color
+ if form.Sorting != 0 {
+ column.Sorting = form.Sorting
+ }
+
+ if err := project_model.UpdateColumn(ctx, column); err != nil {
+ ctx.ServerError("UpdateProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// SetDefaultProjectColumn sets the default column for uncategorized issues/pulls
+func SetDefaultProjectColumn(ctx *context.Context) {
+ project, column := checkProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := project_model.SetDefaultColumn(ctx, project.ID, column.ID); err != nil {
+ ctx.ServerError("SetDefaultColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// MoveIssues moves or keeps issues in a column and sorts them inside that column
+func MoveIssues(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("ProjectNotExist", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("InvalidRepoID", nil)
+ return
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ if project_model.IsErrProjectColumnNotExist(err) {
+ ctx.NotFound("ProjectColumnNotExist", nil)
+ } else {
+ ctx.ServerError("GetProjectColumn", err)
+ }
+ return
+ }
+
+ if column.ProjectID != project.ID {
+ ctx.NotFound("ColumnNotInProject", nil)
+ return
+ }
+
+ type movedIssuesForm struct {
+ Issues []struct {
+ IssueID int64 `json:"issueID"`
+ Sorting int64 `json:"sorting"`
+ } `json:"issues"`
+ }
+
+ form := &movedIssuesForm{}
+ if err = json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.ServerError("DecodeMovedIssuesForm", err)
+ }
+
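+ // Build the list of moved issue IDs and the sorting->issueID map used to reorder them within the column.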
+ issueIDs := make([]int64, 0, len(form.Issues))
+ sortedIssueIDs := make(map[int64]int64)
+ for _, issue := range form.Issues {
+ issueIDs = append(issueIDs, issue.IssueID)
+ sortedIssueIDs[issue.Sorting] = issue.IssueID
+ }
+ movedIssues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("IssueNotExisting", nil)
+ } else {
+ ctx.ServerError("GetIssueByID", err)
+ }
+ return
+ }
+
+ if len(movedIssues) != len(form.Issues) {
+ ctx.ServerError("some issues do not exist", errors.New("some issues do not exist"))
+ return
+ }
+
+ for _, issue := range movedIssues {
+ if issue.RepoID != project.RepoID {
+ ctx.ServerError("Some issue's repoID is not equal to project's repoID", errors.New("Some issue's repoID is not equal to project's repoID"))
+ return
+ }
+ }
+
+ if err = project_model.MoveIssuesOnProjectColumn(ctx, column, sortedIssueIDs); err != nil {
+ ctx.ServerError("MoveIssuesOnProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/repo/projects_test.go b/routers/web/repo/projects_test.go
new file mode 100644
index 0000000..d61230a
--- /dev/null
+++ b/routers/web/repo/projects_test.go
@@ -0,0 +1,27 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCheckProjectColumnChangePermissions(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/projects/1/2")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.SetParams(":id", "1")
+ ctx.SetParams(":columnID", "2")
+
+ project, column := checkProjectColumnChangePermissions(ctx)
+ assert.NotNil(t, project)
+ assert.NotNil(t, column)
+ assert.False(t, ctx.Written())
+}
diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go
new file mode 100644
index 0000000..bc85012
--- /dev/null
+++ b/routers/web/repo/pull.go
@@ -0,0 +1,1838 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "html"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ pull_model "code.gitea.io/gitea/models/pull"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ issue_template "code.gitea.io/gitea/modules/issue/template"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/automerge"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/gitdiff"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/gobwas/glob"
+)
+
+const (
+ tplFork base.TplName = "repo/pulls/fork"
+ tplCompareDiff base.TplName = "repo/diff/compare"
+ tplPullCommits base.TplName = "repo/pulls/commits"
+ tplPullFiles base.TplName = "repo/pulls/files"
+
+ pullRequestTemplateKey = "PullRequestTemplate"
+)
+
+var pullRequestTemplateCandidates = []string{
+ "PULL_REQUEST_TEMPLATE.md",
+ "PULL_REQUEST_TEMPLATE.yaml",
+ "PULL_REQUEST_TEMPLATE.yml",
+ "pull_request_template.md",
+ "pull_request_template.yaml",
+ "pull_request_template.yml",
+ ".forgejo/PULL_REQUEST_TEMPLATE.md",
+ ".forgejo/PULL_REQUEST_TEMPLATE.yaml",
+ ".forgejo/PULL_REQUEST_TEMPLATE.yml",
+ ".forgejo/pull_request_template.md",
+ ".forgejo/pull_request_template.yaml",
+ ".forgejo/pull_request_template.yml",
+ ".gitea/PULL_REQUEST_TEMPLATE.md",
+ ".gitea/PULL_REQUEST_TEMPLATE.yaml",
+ ".gitea/PULL_REQUEST_TEMPLATE.yml",
+ ".gitea/pull_request_template.md",
+ ".gitea/pull_request_template.yaml",
+ ".gitea/pull_request_template.yml",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/PULL_REQUEST_TEMPLATE.yaml",
+ ".github/PULL_REQUEST_TEMPLATE.yml",
+ ".github/pull_request_template.md",
+ ".github/pull_request_template.yaml",
+ ".github/pull_request_template.yml",
+}
+
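+// getRepository loads the repository by ID and checks that the doer can read its code; on failure it writes a response and returns nil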
+func getRepository(ctx *context.Context, repoID int64) *repo_model.Repository {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.NotFound("GetRepositoryByID", nil)
+ } else {
+ ctx.ServerError("GetRepositoryByID", err)
+ }
+ return nil
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+
+ if !perm.CanRead(unit.TypeCode) {
+ log.Trace("Permission Denied: User %-v cannot read %-v of repo %-v\n"+
+ "User in repo has Permissions: %-+v",
+ ctx.Doer,
+ unit.TypeCode,
+ ctx.Repo,
+ perm)
+ ctx.NotFound("getRepository", nil)
+ return nil
+ }
+ return repo
+}
+
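+// updateForkRepositoryInContext checks that forkRepo can be forked by the doer and fills the fork form data into the context; it reports whether rendering can continue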
+func updateForkRepositoryInContext(ctx *context.Context, forkRepo *repo_model.Repository) bool {
+ if forkRepo == nil {
+ ctx.NotFound("No repository in context", nil)
+ return false
+ }
+
+ if forkRepo.IsEmpty {
+ log.Trace("Empty repository %-v", forkRepo)
+ ctx.NotFound("updateForkRepositoryInContext", nil)
+ return false
+ }
+
+ if err := forkRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return false
+ }
+
+ ctx.Data["repo_name"] = forkRepo.Name
+ ctx.Data["description"] = forkRepo.Description
+ ctx.Data["IsPrivate"] = forkRepo.IsPrivate || forkRepo.Owner.Visibility == structs.VisibleTypePrivate
+ canForkToUser := forkRepo.OwnerID != ctx.Doer.ID && !repo_model.HasForkedRepo(ctx, ctx.Doer.ID, forkRepo.ID)
+
+ ctx.Data["ForkRepo"] = forkRepo
+
+ ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetOrgsCanCreateRepoByUserID", err)
+ return false
+ }
+ var orgs []*organization.Organization
+ for _, org := range ownedOrgs {
+ if forkRepo.OwnerID != org.ID && !repo_model.HasForkedRepo(ctx, org.ID, forkRepo.ID) {
+ orgs = append(orgs, org)
+ }
+ }
+
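+ // Walk up the fork chain and drop owners that already own one of the ancestors,
+ // since they cannot fork this repository again.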
+ traverseParentRepo := forkRepo
+ for {
+ if ctx.Doer.ID == traverseParentRepo.OwnerID {
+ canForkToUser = false
+ } else {
+ for i, org := range orgs {
+ if org.ID == traverseParentRepo.OwnerID {
+ orgs = append(orgs[:i], orgs[i+1:]...)
+ break
+ }
+ }
+ }
+
+ if !traverseParentRepo.IsFork {
+ break
+ }
+ traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return false
+ }
+ }
+
+ ctx.Data["CanForkToUser"] = canForkToUser
+ ctx.Data["Orgs"] = orgs
+
+ if canForkToUser {
+ ctx.Data["ContextUser"] = ctx.Doer
+ } else if len(orgs) > 0 {
+ ctx.Data["ContextUser"] = orgs[0]
+ } else {
+ ctx.Data["CanForkRepo"] = false
+ ctx.RenderWithErr(ctx.Tr("repo.fork_no_valid_owners"), tplFork, nil)
+ return false
+ }
+
+ branches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ ListOptions: db.ListOptions{
+ ListAll: true,
+ },
+ IsDeletedBranch: optional.Some(false),
+ // The default branch is excluded here and prepended as the first option below
+ ExcludeBranchNames: []string{ctx.Repo.Repository.DefaultBranch},
+ })
+ if err != nil {
+ ctx.ServerError("FindBranchNames", err)
+ return false
+ }
+ ctx.Data["Branches"] = append([]string{ctx.Repo.Repository.DefaultBranch}, branches...)
+
+ return true
+}
+
+// ForkByID redirects (with 301 Moved Permanently) to the repository's `/fork` page
+func ForkByID(ctx *context.Context) {
+ ctx.Redirect(ctx.Repo.Repository.Link()+"/fork", http.StatusMovedPermanently)
+}
+
+// Fork renders the repository fork page
+func Fork(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("new_fork")
+
+ if ctx.Doer.CanForkRepo() {
+ ctx.Data["CanForkRepo"] = true
+ } else {
+ maxCreationLimit := ctx.Doer.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.Flash.Error(msg, true)
+ }
+
+ if !updateForkRepositoryInContext(ctx, ctx.Repo.Repository) {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplFork)
+}
+
+// ForkPost handles the form submission for forking a repository
+func ForkPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateRepoForm)
+ ctx.Data["Title"] = ctx.Tr("new_fork")
+ ctx.Data["CanForkRepo"] = true
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+
+ forkRepo := ctx.Repo.Repository
+ if !updateForkRepositoryInContext(ctx, forkRepo) {
+ return
+ }
+
+ ctx.Data["ContextUser"] = ctxUser
+
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplFork)
+ return
+ }
+
+ var err error
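+ // Walk up the fork chain: owning an ancestor rejects the fork, while an existing
+ // fork of an ancestor redirects to that fork instead.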
+ traverseParentRepo := forkRepo
+ for {
+ if ctxUser.ID == traverseParentRepo.OwnerID {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
+ return
+ }
+ repo := repo_model.GetForkedRepo(ctx, ctxUser.ID, traverseParentRepo.ID)
+ if repo != nil {
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
+ return
+ }
+ if !traverseParentRepo.IsFork {
+ break
+ }
+ traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return
+ }
+ }
+
+ // Check if the user is allowed to create repositories in the organization.
+ if ctxUser.IsOrganization() {
+ isAllowedToFork, err := organization.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return
+ } else if !isAllowedToFork {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ repo, err := repo_service.ForkRepositoryAndUpdates(ctx, ctx.Doer, ctxUser, repo_service.ForkRepoOptions{
+ BaseRepo: forkRepo,
+ Name: form.RepoName,
+ Description: form.Description,
+ SingleBranch: form.ForkSingleBranch,
+ })
+ if err != nil {
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := ctxUser.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tplFork, &form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tplFork, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tplFork, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tplFork, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tplFork, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplFork, &form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplFork, &form)
+ default:
+ ctx.ServerError("ForkPost", err)
+ }
+ return
+ }
+
+ log.Trace("Repository forked[%d]: %s/%s", forkRepo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
+}
+
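+// getPullInfo loads the pull request issue addressed by the :index parameter and fills the
+// common context data; ok is false when a response has already been written.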
+func getPullInfo(ctx *context.Context) (issue *issues_model.Issue, ok bool) {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.ServerError("GetIssueByIndex", err)
+ }
+ return nil, false
+ }
+ if err = issue.LoadPoster(ctx); err != nil {
+ ctx.ServerError("LoadPoster", err)
+ return nil, false
+ }
+ if err := issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return nil, false
+ }
+ ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, emoji.ReplaceAliases(issue.Title))
+ ctx.Data["Issue"] = issue
+
+ if !issue.IsPull {
+ ctx.NotFound("ViewPullCommits", nil)
+ return nil, false
+ }
+
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ ctx.ServerError("LoadPullRequest", err)
+ return nil, false
+ }
+
+ if err = issue.PullRequest.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return nil, false
+ }
+
+ if ctx.IsSigned {
+ // Update issue-user.
+ if err = activities_model.SetIssueReadBy(ctx, issue.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return nil, false
+ }
+ }
+
+ return issue, true
+}
+
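+// setMergeTarget sets the head and base target labels of the pull request in the context data.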
+func setMergeTarget(ctx *context.Context, pull *issues_model.PullRequest) {
+ if ctx.Repo.Owner.Name == pull.MustHeadUserName(ctx) {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + ":" + pull.HeadBranch
+ } else {
+ ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch
+ }
+
+ if pull.Flow == issues_model.PullRequestFlowAGit {
+ ctx.Data["MadeUsingAGit"] = true
+ }
+
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["HeadBranchLink"] = pull.GetHeadBranchLink(ctx)
+ ctx.Data["BaseBranchLink"] = pull.GetBaseBranchLink(ctx)
+}
+
+// GetPullDiffStats gets the diff stats of a pull request
+func GetPullDiffStats(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ mergeBaseCommitID := GetMergedBaseCommitID(ctx, issue)
+
+ if mergeBaseCommitID == "" {
+ ctx.NotFound("PullFiles", nil)
+ return
+ }
+
+ headCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+
+ diffOptions := &gitdiff.DiffOptions{
+ BeforeCommitID: mergeBaseCommitID,
+ AfterCommitID: headCommitID,
+ MaxLines: setting.Git.MaxGitDiffLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: setting.Git.MaxGitDiffFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }
+
+ diff, err := gitdiff.GetPullDiffStats(ctx.Repo.GitRepo, diffOptions)
+ if err != nil {
+ ctx.ServerError("GetPullDiffStats", err)
+ return
+ }
+
+ ctx.Data["Diff"] = diff
+}
+
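+// GetMergedBaseCommitID returns the commit a merged pull request should be diffed against,
+// reconstructing it from the head ref or the patch file when no merge base was recorded.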
+func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) string {
+ pull := issue.PullRequest
+
+ var baseCommit string
+ // Some migrated PRs don't have a base SHA and have lost their history; try to recover one
+ if pull.MergeBase == "" {
+ var commitSHA, parentCommit string
+ // If there is a head or a patch file, and it is readable, grab info
+ commitSHA, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ // The head ref does not exist; try the patch file instead
+ commitSHA, err = ctx.Repo.GitRepo.ReadPatchCommit(pull.Index)
+ if err == nil {
+ // Recreate pull head in files for next time
+ if err := ctx.Repo.GitRepo.SetReference(pull.GetGitRefName(), commitSHA); err != nil {
+ log.Error("Could not write head file", err)
+ }
+ } else {
+ // There is no history available
+ log.Trace("No history file available for PR %d", pull.Index)
+ }
+ }
+ if commitSHA != "" {
+ // Get the immediate parent of the head commit to bring back some history
+ parentCommit, _, err = git.NewCommand(ctx, "rev-list", "-1", "--skip=1").AddDynamicArguments(commitSHA).RunStdString(&git.RunOpts{Dir: ctx.Repo.GitRepo.Path})
+ if err == nil {
+ parentCommit = strings.TrimSpace(parentCommit)
+ }
+ // Special case: Git < 2.25 does not fail when the history is immediately empty
+ if err != nil || parentCommit == "" {
+ log.Info("No known parent commit for PR %d, error: %v", pull.Index, err)
+ // fall back to the head commit itself so at least partial history is available
+ parentCommit = commitSHA
+ }
+ }
+ baseCommit = parentCommit
+ } else {
+ // Use the merge base recorded on the pull request
+ baseCommit = pull.MergeBase
+ }
+
+ return baseCommit
+}
+
+// PrepareMergedViewPullInfo shows meta information for a merged pull request view page
+func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
+ pull := issue.PullRequest
+
+ setMergeTarget(ctx, pull)
+ ctx.Data["HasMerged"] = true
+
+ baseCommit := GetMergedBaseCommitID(ctx, issue)
+
+ compareInfo, err := ctx.Repo.GitRepo.GetCompareInfo(ctx.Repo.Repository.RepoPath(),
+ baseCommit, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "unknown revision or path not in the working tree") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+
+ if len(compareInfo.Commits) != 0 {
+ sha := compareInfo.Commits[0].ID.String()
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) != 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+ }
+
+ return compareInfo
+}
+
+// PrepareViewPullInfo shows meta information for a pull request preview page
+func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+
+ repo := ctx.Repo.Repository
+ pull := issue.PullRequest
+
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return nil
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return nil
+ }
+
+ setMergeTarget(ctx, pull)
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return nil
+ }
+ ctx.Data["EnableStatusCheck"] = pb != nil && pb.EnableStatusCheck
+
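+ // Reuse the git repository already opened in the request context when it is the base repo;
+ // otherwise open the base repo explicitly.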
+ var baseGitRepo *git.Repository
+ if pull.BaseRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
+ baseGitRepo = ctx.Repo.GitRepo
+ } else {
+ baseGitRepo, err = gitrepo.OpenRepository(ctx, pull.BaseRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer baseGitRepo.Close()
+ }
+
+ if !baseGitRepo.IsBranchExist(pull.BaseBranch) {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+
+ sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
+ return nil
+ }
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) > 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
+ pull.MergeBase, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+ return compareInfo
+ }
+
+ var headBranchExist bool
+ var headBranchSha string
+ // HeadRepo may be missing
+ if pull.HeadRepo != nil {
+ headGitRepo, err := gitrepo.OpenRepository(ctx, pull.HeadRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer headGitRepo.Close()
+
+ if pull.Flow == issues_model.PullRequestFlowGithub {
+ headBranchExist = headGitRepo.IsBranchExist(pull.HeadBranch)
+ } else {
+ headBranchExist = git.IsReferenceExist(ctx, baseGitRepo.Path, pull.GetGitRefName())
+ }
+
+ if headBranchExist {
+ if pull.Flow != issues_model.PullRequestFlowGithub {
+ headBranchSha, err = baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ } else {
+ headBranchSha, err = headGitRepo.GetBranchCommitID(pull.HeadBranch)
+ }
+ if err != nil {
+ ctx.ServerError("GetBranchCommitID", err)
+ return nil
+ }
+ }
+ }
+
+ if headBranchExist {
+ var err error
+ ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(ctx, pull, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToUpdate", err)
+ return nil
+ }
+ ctx.Data["GetCommitMessages"] = pull_service.GetSquashMergeCommitMessages(ctx, pull)
+ } else {
+ ctx.Data["GetCommitMessages"] = ""
+ }
+
+ sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.Data["IsPullRequestBroken"] = true
+ if pull.IsSameRepo() {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
+ } else {
+ ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
+ }
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+ ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
+ return nil
+ }
+
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) > 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+
+ if pb != nil && pb.EnableStatusCheck {
+ var missingRequiredChecks []string
+ for _, requiredContext := range pb.StatusCheckContexts {
+ contextFound := false
+ matchesRequiredContext := createRequiredContextMatcher(requiredContext)
+ for _, presentStatus := range commitStatuses {
+ if matchesRequiredContext(presentStatus.Context) {
+ contextFound = true
+ break
+ }
+ }
+
+ if !contextFound {
+ missingRequiredChecks = append(missingRequiredChecks, requiredContext)
+ }
+ }
+ ctx.Data["MissingRequiredChecks"] = missingRequiredChecks
+
+ ctx.Data["is_context_required"] = func(context string) bool {
+ for _, c := range pb.StatusCheckContexts {
+ if c == context {
+ return true
+ }
+ if gp, err := glob.Compile(c); err != nil {
+ // All newly created status_check_contexts are validated as glob expressions before being stored in the database,
+ // but some old status_check_contexts created before glob support was introduced may not be valid globs.
+ // Log the error here to aid debugging.
+ log.Error("compile glob %q: %v", c, err)
+ } else if gp.Match(context) {
+ return true
+ }
+ }
+ return false
+ }
+ ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
+ }
+
+ ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha
+ ctx.Data["HeadBranchCommitID"] = headBranchSha
+ ctx.Data["PullHeadCommitID"] = sha
+
+ if pull.HeadRepo == nil || !headBranchExist || (!pull.Issue.IsClosed && (headBranchSha != sha)) {
+ ctx.Data["IsPullRequestBroken"] = true
+ if pull.IsSameRepo() {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
+ } else {
+ ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
+ }
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
+ git.BranchPrefix+pull.BaseBranch, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+
+ if compareInfo.HeadCommitID == compareInfo.MergeBase {
+ ctx.Data["IsNothingToCompare"] = true
+ }
+
+ if pull.IsWorkInProgress(ctx) {
+ ctx.Data["IsPullWorkInProgress"] = true
+ ctx.Data["WorkInProgressPrefix"] = pull.GetWorkInProgressPrefix(ctx)
+ }
+
+ if pull.IsFilesConflicted() {
+ ctx.Data["IsPullFilesConflicted"] = true
+ ctx.Data["ConflictedFiles"] = pull.ConflictedFiles
+ }
+
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+ return compareInfo
+}
+
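+// createRequiredContextMatcher matches commit status contexts against a required context,
+// treating it as a glob pattern when it compiles and falling back to exact comparison otherwise.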
+func createRequiredContextMatcher(requiredContext string) func(string) bool {
+ if gp, err := glob.Compile(requiredContext); err == nil {
+ return func(contextToCheck string) bool {
+ return gp.Match(contextToCheck)
+ }
+ }
+
+ return func(contextToCheck string) bool {
+ return requiredContext == contextToCheck
+ }
+}
+
+type pullCommitList struct {
+ Commits []pull_service.CommitInfo `json:"commits"`
+ LastReviewCommitSha string `json:"last_review_commit_sha"`
+ Locale map[string]any `json:"locale"`
+}
+
+// GetPullCommits returns all commits of the given pull request
+func GetPullCommits(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ resp := &pullCommitList{}
+
+ commits, lastReviewCommitSha, err := pull_service.GetPullCommits(ctx, issue)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+
+ // Get the needed locale
+ resp.Locale = map[string]any{
+ "lang": ctx.Locale.Language(),
+ "show_all_commits": ctx.Tr("repo.pulls.show_all_commits"),
+ "stats_num_commits": ctx.TrN(len(commits), "repo.activity.git_stats_commit_1", "repo.activity.git_stats_commit_n", len(commits)),
+ "show_changes_since_your_last_review": ctx.Tr("repo.pulls.show_changes_since_your_last_review"),
+ "select_commit_hold_shift_for_range": ctx.Tr("repo.pulls.select_commit_hold_shift_for_range"),
+ }
+
+ resp.Commits = commits
+ resp.LastReviewCommitSha = lastReviewCommitSha
+
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// ViewPullCommits shows the commits of a pull request
+func ViewPullCommits(ctx *context.Context) {
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullCommits"] = true
+
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var prInfo *git.CompareInfo
+ if pull.HasMerged {
+ prInfo = PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ prInfo = PrepareViewPullInfo(ctx, issue)
+ }
+
+ if ctx.Written() {
+ return
+ } else if prInfo == nil {
+ ctx.NotFound("ViewPullCommits", nil)
+ return
+ }
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+
+ commits := processGitCommits(ctx, prInfo.Commits)
+ ctx.Data["Commits"] = commits
+ ctx.Data["CommitCount"] = len(commits)
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+
+ // For PR commits page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+ getBranchData(ctx, issue)
+ ctx.HTML(http.StatusOK, tplPullCommits)
+}
+
+// viewPullFiles renders the changed-files list page of a pull request
+func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommit string, willShowSpecifiedCommitRange, willShowSpecifiedCommit bool) {
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullFiles"] = true
+
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var (
+ startCommitID string
+ endCommitID string
+ gitRepo = ctx.Repo.GitRepo
+ )
+
+ var prInfo *git.CompareInfo
+ if pull.HasMerged {
+ prInfo = PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ prInfo = PrepareViewPullInfo(ctx, issue)
+ }
+
+ // Validate the given commit SHAs to show (if any were passed)
+ if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ foundStartCommit := len(specifiedStartCommit) == 0
+ foundEndCommit := len(specifiedEndCommit) == 0
+
+ if !(foundStartCommit && foundEndCommit) {
+ for _, commit := range prInfo.Commits {
+ if commit.ID.String() == specifiedStartCommit {
+ foundStartCommit = true
+ }
+ if commit.ID.String() == specifiedEndCommit {
+ foundEndCommit = true
+ }
+
+ if foundStartCommit && foundEndCommit {
+ break
+ }
+ }
+ }
+
+ if !(foundStartCommit && foundEndCommit) {
+ ctx.NotFound("Given SHA1 not found for this PR", nil)
+ return
+ }
+ }
+
+ if ctx.Written() {
+ return
+ } else if prInfo == nil {
+ ctx.NotFound("ViewPullFiles", nil)
+ return
+ }
+
+ headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+
+ ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
+
+ if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ if len(specifiedEndCommit) > 0 {
+ endCommitID = specifiedEndCommit
+ } else {
+ endCommitID = headCommitID
+ }
+ if len(specifiedStartCommit) > 0 {
+ startCommitID = specifiedStartCommit
+ } else {
+ startCommitID = prInfo.MergeBase
+ }
+ ctx.Data["IsShowingAllCommits"] = false
+ } else {
+ endCommitID = headCommitID
+ startCommitID = prInfo.MergeBase
+ ctx.Data["IsShowingAllCommits"] = true
+ }
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["AfterCommitID"] = endCommitID
+ ctx.Data["BeforeCommitID"] = startCommitID
+
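+ // In file-only mode with one or two selected files, lift the diff size limits so the
+ // full diff of those files is returned.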
+ fileOnly := ctx.FormBool("file-only")
+
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
+ if fileOnly && (len(files) == 2 || len(files) == 1) {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diffOptions := &gitdiff.DiffOptions{
+ AfterCommitID: endCommitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }
+
+ if !willShowSpecifiedCommit {
+ diffOptions.BeforeCommitID = startCommitID
+ }
+
+ var methodWithError string
+ var diff *gitdiff.Diff
+
+ // If the viewer is not signed in, or only a single commit or a commit range is shown,
+ // load just the diff without the per-file "viewed" state: that state is only tracked
+ // for the latest diff of the PR and only for signed-in users.
+ if !ctx.IsSigned || willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ diff, err = gitdiff.GetDiff(ctx, gitRepo, diffOptions, files...)
+ methodWithError = "GetDiff"
+ } else {
+ diff, err = gitdiff.SyncAndGetUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diffOptions, files...)
+ methodWithError = "SyncAndGetUserSpecificDiff"
+ }
+ if err != nil {
+ ctx.ServerError(methodWithError, err)
+ return
+ }
+
+ ctx.PageData["prReview"] = map[string]any{
+ "numberOfFiles": diff.NumFiles,
+ "numberOfViewedFiles": diff.NumViewedFiles,
+ }
+
+ if err = diff.LoadComments(ctx, issue, ctx.Doer, ctx.Data["ShowOutdatedComments"].(bool)); err != nil {
+ ctx.ServerError("LoadComments", err)
+ return
+ }
+
+ for _, file := range diff.Files {
+ for _, section := range file.Sections {
+ for _, line := range section.Lines {
+ for _, comments := range line.Conversations {
+ for _, comment := range comments {
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+ }
+ }
+ }
+ }
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return
+ }
+
+ if pb != nil {
+ glob := pb.GetProtectedFilePatterns()
+ if len(glob) != 0 {
+ for _, file := range diff.Files {
+ file.IsProtected = pb.IsProtectedFile(glob, file.Name)
+ }
+ }
+ }
+
+ ctx.Data["Diff"] = diff
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ baseCommit, err := ctx.Repo.GitRepo.GetCommit(startCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+ commit, err := gitRepo.GetCommit(endCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+
+ // determine if the user viewing the pull request can edit the head branch
+ if ctx.Doer != nil && pull.HeadRepo != nil && !pull.HasMerged {
+ headRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ ctx.Data["HeadBranchIsEditable"] = pull.HeadRepo.CanEnableEditor() && issues_model.CanMaintainerWriteToBranch(ctx, headRepoPerm, pull.HeadBranch, ctx.Doer)
+ ctx.Data["SourceRepoLink"] = pull.HeadRepo.Link()
+ ctx.Data["HeadBranch"] = pull.HeadBranch
+ }
+
+ if ctx.IsSigned && ctx.Doer != nil {
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ }
+
+ setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
+
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ currentReview, err := issues_model.GetCurrentReview(ctx, ctx.Doer, issue)
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("GetCurrentReview", err)
+ return
+ }
+ numPendingCodeComments := int64(0)
+ if currentReview != nil {
+ numPendingCodeComments, err = issues_model.CountComments(ctx, &issues_model.FindCommentsOptions{
+ Type: issues_model.CommentTypeCode,
+ ReviewID: currentReview.ID,
+ IssueID: issue.ID,
+ })
+ if err != nil {
+ ctx.ServerError("CountComments", err)
+ return
+ }
+ }
+ ctx.Data["CurrentReview"] = currentReview
+ ctx.Data["PendingCodeCommentNumber"] = numPendingCodeComments
+
+ getBranchData(ctx, issue)
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ // For files changed page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.HTML(http.StatusOK, tplPullFiles)
+}
+
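+// ViewPullFilesForSingleCommit renders the files changed by a single commit of the pull request.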
+func ViewPullFilesForSingleCommit(ctx *context.Context) {
+ viewPullFiles(ctx, "", ctx.Params("sha"), true, true)
+}
+
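+// ViewPullFilesForRange renders the files changed between two commits of the pull request.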
+func ViewPullFilesForRange(ctx *context.Context) {
+ viewPullFiles(ctx, ctx.Params("shaFrom"), ctx.Params("shaTo"), true, false)
+}
+
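+// ViewPullFilesStartingFromCommit renders the files changed from the merge base up to the given commit.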
+func ViewPullFilesStartingFromCommit(ctx *context.Context) {
+ viewPullFiles(ctx, "", ctx.Params("sha"), true, false)
+}
+
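+// ViewPullFilesForAllCommitsOfPr renders all files changed by the pull request.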
+func ViewPullFilesForAllCommitsOfPr(ctx *context.Context) {
+ viewPullFiles(ctx, "", "", false, false)
+}
+
+// UpdatePullRequest merges the PR's base branch into its head branch
+func UpdatePullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ if issue.IsClosed {
+ ctx.NotFound("MergePullRequest", nil)
+ return
+ }
+ if issue.PullRequest.HasMerged {
+ ctx.NotFound("MergePullRequest", nil)
+ return
+ }
+
+ rebase := ctx.FormString("style") == "rebase"
+
+ if err := issue.PullRequest.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return
+ }
+ if err := issue.PullRequest.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return
+ }
+
+ allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(ctx, issue.PullRequest, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToMerge", err)
+ return
+ }
+
+ // ToDo: add check if maintainers are allowed to change branch ... (need migration & co)
+ if (!allowedUpdateByMerge && !rebase) || (rebase && !allowedUpdateByRebase) {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed"))
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ // default merge commit message
+ message := fmt.Sprintf("Merge branch '%s' into %s", issue.PullRequest.BaseBranch, issue.PullRequest.HeadBranch)
+
+ if err = pull_service.Update(ctx, issue.PullRequest, ctx.Doer, message, rebase); err != nil {
+ if models.IsErrMergeConflicts(err) {
+ conflictError := err.(models.ErrMergeConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.merge_conflict"),
+ "Summary": ctx.Tr("repo.pulls.merge_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.Redirect(issue.Link())
+ return
+ } else if models.IsErrRebaseConflicts(err) {
+ conflictError := err.(models.ErrRebaseConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
+ "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.Redirect(issue.Link())
+ return
+ }
+ ctx.Flash.Error(err.Error())
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ time.Sleep(1 * time.Second)
+
+ ctx.Flash.Success(ctx.Tr("repo.pulls.update_branch_success"))
+ ctx.Redirect(issue.Link())
+}
+
+// MergePullRequest handles the request to merge a pull request
+func MergePullRequest(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.MergePullRequestForm)
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ pr := issue.PullRequest
+ pr.Issue = issue
+ pr.Issue.Repo = ctx.Repo.Repository
+
+ manuallyMerged := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
+
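+ // Pick the mergeability check type: auto when merging once checks succeed, manual for
+ // manually-merged marks, general otherwise.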
+ mergeCheckType := pull_service.MergeCheckTypeGeneral
+ if form.MergeWhenChecksSucceed {
+ mergeCheckType = pull_service.MergeCheckTypeAuto
+ }
+ if manuallyMerged {
+ mergeCheckType = pull_service.MergeCheckTypeManually
+ }
+
+ // before merging, check that the pull request is in a mergeable state
+ if err := pull_service.CheckPullMergeable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, mergeCheckType, form.ForceMerge); err != nil {
+ switch {
+ case errors.Is(err, pull_service.ErrIsClosed):
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.pulls.is_closed"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.closed_title"))
+ }
+ case errors.Is(err, pull_service.ErrUserNotAllowedToMerge):
+ ctx.JSONError(ctx.Tr("repo.pulls.update_not_allowed"))
+ case errors.Is(err, pull_service.ErrHasMerged):
+ ctx.JSONError(ctx.Tr("repo.pulls.has_merged"))
+ case errors.Is(err, pull_service.ErrIsWorkInProgress):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_wip"))
+ case errors.Is(err, pull_service.ErrNotMergeableState):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
+ case models.IsErrDisallowedToMerge(err):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
+ case asymkey_service.IsErrWontSign(err):
+ ctx.JSONError(err.Error()) // has no translation ...
+ case errors.Is(err, pull_service.ErrDependenciesLeft):
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ default:
+ ctx.ServerError("WebCheck", err)
+ }
+
+ return
+ }
+
+ // handle manually-merged mark
+ if manuallyMerged {
+ if err := pull_service.MergedManually(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil {
+ switch {
+ case models.IsErrInvalidMergeStyle(err):
+ ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
+ case strings.Contains(err.Error(), "Wrong commit ID"):
+ ctx.JSONError(ctx.Tr("repo.pulls.wrong_commit_id"))
+ default:
+ ctx.ServerError("MergedManually", err)
+ }
+
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+ return
+ }
+
+ message := strings.TrimSpace(form.MergeTitleField)
+ if len(message) == 0 {
+ var err error
+ message, _, err = pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pr, repo_model.MergeStyle(form.Do))
+ if err != nil {
+ ctx.ServerError("GetDefaultMergeMessage", err)
+ return
+ }
+ }
+
+ form.MergeMessageField = strings.TrimSpace(form.MergeMessageField)
+ if len(form.MergeMessageField) > 0 {
+ message += "\n\n" + form.MergeMessageField
+ }
+
+ if form.MergeWhenChecksSucceed {
+ // delete all scheduled auto merges
+ _ = pull_model.DeleteScheduledAutoMerge(ctx, pr.ID)
+ // schedule auto merge
+ scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message)
+ if err != nil {
+ ctx.ServerError("ScheduleAutoMerge", err)
+ return
+ } else if scheduled {
+ // nothing more to do ...
+ ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_newly_scheduled"))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, pr.Index))
+ return
+ }
+ }
+
+ if err := pull_service.Merge(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message, false); err != nil {
+ if models.IsErrInvalidMergeStyle(err) {
+ ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
+ } else if models.IsErrMergeConflicts(err) {
+ conflictError := err.(models.ErrMergeConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.merge_conflict"),
+ "Summary": ctx.Tr("repo.editor.merge_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrRebaseConflicts(err) {
+ conflictError := err.(models.ErrRebaseConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
+ "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrMergeUnrelatedHistories(err) {
+ log.Debug("MergeUnrelatedHistories error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.unrelated_histories"))
+ ctx.JSONRedirect(issue.Link())
+ } else if git.IsErrPushOutOfDate(err) {
+ log.Debug("MergePushOutOfDate error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.merge_out_of_date"))
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrSHADoesNotMatch(err) {
+ log.Debug("MergeHeadOutOfDate error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.head_out_of_date"))
+ ctx.JSONRedirect(issue.Link())
+ } else if git.IsErrPushRejected(err) {
+ log.Debug("MergePushRejected error: %v", err)
+ pushrejErr := err.(*git.ErrPushRejected)
+ message := pushrejErr.Message
+ if len(message) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.push_rejected_no_message"))
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.push_rejected"),
+ "Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ }
+ ctx.JSONRedirect(issue.Link())
+ } else {
+ ctx.ServerError("Merge", err)
+ }
+ return
+ }
+ log.Trace("Pull request merged: %d", pr.ID)
+
+ if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
+ ctx.ServerError("stopTimerIfAvailable", err)
+ return
+ }
+
+ if form.DeleteBranchAfterMerge {
+ // Don't clean up when other PRs use this branch as their head branch
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+ if exist {
+ ctx.JSONRedirect(issue.Link())
+ return
+ }
+
+ var headRepo *git.Repository
+ if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
+ headRepo = ctx.Repo.GitRepo
+ } else {
+ headRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
+ return
+ }
+ defer headRepo.Close()
+ }
+ deleteBranch(ctx, pr, headRepo)
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// CancelAutoMergePullRequest cancels the scheduled auto merge of a pull request
+func CancelAutoMergePullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, issue.PullRequest); err != nil {
+ if db.IsErrNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.auto_merge_not_scheduled"))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+ ctx.ServerError("RemoveScheduledAutoMerge", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_canceled_schedule"))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
+}
+
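+// stopTimerIfAvailable stops the user's running stopwatch on the issue, if there is one.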
+func stopTimerIfAvailable(ctx *context.Context, user *user_model.User, issue *issues_model.Issue) error {
+ if issues_model.StopwatchExists(ctx, user.ID, issue.ID) {
+ if err := issues_model.CreateOrStopIssueStopwatch(ctx, user, issue); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// CompareAndPullRequestPost handles the request to create a pull request
+func CompareAndPullRequestPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateIssueForm)
+ ctx.Data["Title"] = ctx.Tr("repo.pulls.compare_changes")
+ ctx.Data["PageIsComparePull"] = true
+ ctx.Data["IsDiffCompare"] = true
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypePullRequests)
+
+ var (
+ repo = ctx.Repo.Repository
+ attachments []string
+ )
+
+ ci := ParseCompareInfo(ctx)
+ defer func() {
+ if ci != nil && ci.HeadGitRepo != nil {
+ ci.HeadGitRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+
+ labelIDs, assigneeIDs, milestoneID, projectID := ValidateRepoMetas(ctx, *form, true)
+ if ctx.Written() {
+ return
+ }
+
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
+ return
+ }
+
+ content := form.Content
+ if filename := ctx.Req.Form.Get("template-file"); filename != "" {
+ if template, err := issue_template.UnmarshalFromRepo(ctx.Repo.GitRepo, ctx.Repo.Repository.DefaultBranch, filename); err == nil {
+ content = issue_template.RenderToMarkdown(template, ctx.Req.Form)
+ }
+ }
+
+ pullIssue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: form.Title,
+ PosterID: ctx.Doer.ID,
+ Poster: ctx.Doer,
+ MilestoneID: milestoneID,
+ IsPull: true,
+ Content: content,
+ }
+ pullRequest := &issues_model.PullRequest{
+ HeadRepoID: ci.HeadRepo.ID,
+ BaseRepoID: repo.ID,
+ HeadBranch: ci.HeadBranch,
+ BaseBranch: ci.BaseBranch,
+ HeadRepo: ci.HeadRepo,
+ BaseRepo: repo,
+ MergeBase: ci.CompareInfo.MergeBase,
+ Type: issues_model.PullRequestGitea,
+ AllowMaintainerEdit: form.AllowMaintainerEdit,
+ }
+ // FIXME: handle the error when two people create a pull request at almost the same time and show a nice
+ // error prompt instead of a 500.
+
+ if err := pull_service.NewPullRequest(ctx, repo, pullIssue, labelIDs, attachments, pullRequest, assigneeIDs); err != nil {
+ switch {
+ case errors.Is(err, user_model.ErrBlockedByUser):
+ ctx.JSONError(ctx.Tr("repo.pulls.blocked_by_user"))
+ case repo_model.IsErrUserDoesNotHaveAccessToRepo(err):
+ ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error())
+ case git.IsErrPushRejected(err):
+ pushrejErr := err.(*git.ErrPushRejected)
+ message := pushrejErr.Message
+ if len(message) == 0 {
+ ctx.JSONError(ctx.Tr("repo.pulls.push_rejected_no_message"))
+ return
+ }
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.push_rejected"),
+ "Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
+ })
+ if err != nil {
+ ctx.ServerError("CompareAndPullRequest.HTMLString", err)
+ return
+ }
+ ctx.JSONError(flashError)
+ default:
+ // It's an unexpected error.
+ // If it happens, we should add another case to handle it.
+ log.Error("Unexpected error of NewPullRequest: %T %s", err, err)
+ ctx.ServerError("CompareAndPullRequest", err)
+ }
+ ctx.ServerError("NewPullRequest", err)
+ return
+ }
+
+ if projectID > 0 && ctx.Repo.CanWrite(unit.TypeProjects) {
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, pullIssue, ctx.Doer, projectID, 0); err != nil {
+ if !errors.Is(err, util.ErrPermissionDenied) {
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+ }
+
+ log.Trace("Pull request created: %d/%d", repo.ID, pullIssue.ID)
+ ctx.JSONRedirect(pullIssue.Link())
+}
+
+// CleanUpPullRequest handles the request to delete the merged branch once the PR has been merged
+func CleanUpPullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ pr := issue.PullRequest
+
+ // Don't clean up PRs that are neither merged nor closed
+ if !pr.HasMerged && !issue.IsClosed {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ // Don't clean up when other PRs use this branch as their head branch.
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+ if exist {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return
+ } else if pr.HeadRepo == nil {
+ // Forked repository has already been deleted
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ } else if err = pr.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return
+ } else if err = pr.HeadRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("HeadRepo.LoadOwner", err)
+ return
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if !perm.CanWrite(unit.TypeCode) {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch
+
+ var gitBaseRepo *git.Repository
+
+ // Assume that the base repo is the current context (almost certainly)
+ if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.BaseRepoID && ctx.Repo.GitRepo != nil {
+ gitBaseRepo = ctx.Repo.GitRepo
+ } else {
+ // If not just open it
+ gitBaseRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.FullName()), err)
+ return
+ }
+ defer gitBaseRepo.Close()
+ }
+
+ // Now assume that the head repo is the same as the base repo (reasonable chance)
+ gitRepo := gitBaseRepo
+ // But if not: is it the same as the context?
+ if pr.BaseRepoID != pr.HeadRepoID && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil {
+ gitRepo = ctx.Repo.GitRepo
+ } else if pr.BaseRepoID != pr.HeadRepoID {
+ // Otherwise just load it up
+ gitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
+ return
+ }
+ defer gitRepo.Close()
+ }
+
+ defer func() {
+ ctx.JSONRedirect(issue.Link())
+ }()
+
+ // Check if branch has no new commits
+ headCommitID, err := gitBaseRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+ branchCommitID, err := gitRepo.GetBranchCommitID(pr.HeadBranch)
+ if err != nil {
+ log.Error("GetBranchCommitID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+ if headCommitID != branchCommitID {
+ ctx.Flash.Error(ctx.Tr("repo.branch.delete_branch_has_new_commits", fullBranchName))
+ return
+ }
+
+ deleteBranch(ctx, pr, gitRepo)
+}
+
+func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) {
+ fullBranchName := pr.HeadRepo.FullName() + ":" + pr.HeadBranch
+
+ if err := pull_service.RetargetChildrenOnMerge(ctx, ctx.Doer, pr); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+
+ if err := repo_service.DeleteBranch(ctx, ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil {
+ switch {
+ case git.IsErrBranchNotExist(err):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ case errors.Is(err, repo_service.ErrBranchIsDefault):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ case errors.Is(err, git_model.ErrBranchIsProtected):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ default:
+ log.Error("DeleteBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ }
+ return
+ }
+
+ if err := issues_model.AddDeletePRBranchComment(ctx, ctx.Doer, pr.BaseRepo, pr.IssueID, pr.HeadBranch); err != nil {
+ // Do not fail here as branch has already been deleted
+ log.Error("DeleteBranch: %v", err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", fullBranchName))
+}
+
+// DownloadPullDiff renders a pull request's raw diff
+func DownloadPullDiff(ctx *context.Context) {
+ DownloadPullDiffOrPatch(ctx, false)
+}
+
+// DownloadPullPatch renders a pull request's raw patch
+func DownloadPullPatch(ctx *context.Context) {
+ DownloadPullDiffOrPatch(ctx, true)
+}
+
+// DownloadPullDiffOrPatch renders a pull request's raw diff or patch
+func DownloadPullDiffOrPatch(ctx *context.Context, patch bool) {
+ pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrPullRequestNotExist(err) {
+ ctx.NotFound("GetPullRequestByIndex", err)
+ } else {
+ ctx.ServerError("GetPullRequestByIndex", err)
+ }
+ return
+ }
+
+ binary := ctx.FormBool("binary")
+
+ if err := pull_service.DownloadDiffOrPatch(ctx, pr, ctx, patch, binary); err != nil {
+ ctx.ServerError("DownloadDiffOrPatch", err)
+ return
+ }
+}
+
+// UpdatePullRequestTarget changes the target branch of a pull request
+func UpdatePullRequestTarget(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ pr := issue.PullRequest
+ if !issue.IsPull {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ targetBranch := ctx.FormTrim("target_branch")
+ if len(targetBranch) == 0 {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil {
+ if issues_model.IsErrPullRequestAlreadyExists(err) {
+ err := err.(issues_model.ErrPullRequestAlreadyExists)
+
+ RepoRelPath := ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
+ errorMessage := ctx.Tr("repo.pulls.has_pull_request", html.EscapeString(ctx.Repo.RepoLink+"/pulls/"+strconv.FormatInt(err.IssueID, 10)), html.EscapeString(RepoRelPath), err.IssueID) // FIXME: Creates url inside locale string
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if issues_model.IsErrIssueIsClosed(err) {
+ errorMessage := ctx.Tr("repo.pulls.is_closed")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if models.IsErrPullRequestHasMerged(err) {
+ errorMessage := ctx.Tr("repo.pulls.has_merged")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if git_model.IsErrBranchesEqual(err) {
+ errorMessage := ctx.Tr("repo.pulls.nothing_to_compare")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusBadRequest, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else {
+ ctx.ServerError("UpdatePullRequestTarget", err)
+ }
+ return
+ }
+ notify_service.PullRequestChangeTargetBranch(ctx, ctx.Doer, pr, targetBranch)
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "base_branch": pr.BaseBranch,
+ })
+}
+
+// SetAllowEdits sets whether maintainers are allowed to edit a pull request's head branch
+func SetAllowEdits(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateAllowEditsForm)
+
+ pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrPullRequestNotExist(err) {
+ ctx.NotFound("GetPullRequestByIndex", err)
+ } else {
+ ctx.ServerError("GetPullRequestByIndex", err)
+ }
+ return
+ }
+
+ if err := pull_service.SetAllowEdits(ctx, ctx.Doer, pr, form.AllowMaintainerEdit); err != nil {
+ if errors.Is(err, pull_service.ErrUserHasNoPermissionForAction) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("SetAllowEdits", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "allow_maintainer_edit": pr.AllowMaintainerEdit,
+ })
+}
diff --git a/routers/web/repo/pull_review.go b/routers/web/repo/pull_review.go
new file mode 100644
index 0000000..e8a3c48
--- /dev/null
+++ b/routers/web/repo/pull_review.go
@@ -0,0 +1,316 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ pull_model "code.gitea.io/gitea/models/pull"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+const (
+ tplDiffConversation base.TplName = "repo/diff/conversation"
+ tplTimelineConversation base.TplName = "repo/issue/view_content/conversation"
+ tplNewComment base.TplName = "repo/diff/new_comment"
+)
+
+// RenderNewCodeCommentForm will render the form for creating a new review comment
+func RenderNewCodeCommentForm(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+ currentReview, err := issues_model.GetCurrentReview(ctx, ctx.Doer, issue)
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("GetCurrentReview", err)
+ return
+ }
+ ctx.Data["PageIsPullFiles"] = true
+ ctx.Data["Issue"] = issue
+ ctx.Data["CurrentReview"] = currentReview
+ pullHeadCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(issue.PullRequest.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+ ctx.Data["AfterCommitID"] = pullHeadCommitID
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+ ctx.HTML(http.StatusOK, tplNewComment)
+}
+
+// CreateCodeComment creates a code comment, including a pending review if required
+func CreateCodeComment(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CodeCommentForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
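+ // A negative line number marks a comment on the previous (left) side of the diff.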
+ signedLine := form.Line
+ if form.Side == "previous" {
+ signedLine *= -1
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ comment, err := pull_service.CreateCodeComment(ctx,
+ ctx.Doer,
+ ctx.Repo.GitRepo,
+ issue,
+ signedLine,
+ form.Content,
+ form.TreePath,
+ !form.SingleReview,
+ form.Reply,
+ form.LatestCommitID,
+ attachments,
+ )
+ if err != nil {
+ ctx.ServerError("CreateCodeComment", err)
+ return
+ }
+
+ if comment == nil {
+ log.Trace("Comment not created: %-v #%d[%d]", ctx.Repo.Repository, issue.Index, issue.ID)
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
+ log.Trace("Comment created: %-v #%d[%d] Comment[%d]", ctx.Repo.Repository, issue.Index, issue.ID, comment.ID)
+
+ renderConversation(ctx, comment, form.Origin)
+}
+
+// UpdateResolveConversation adds or removes the resolved mark of a conversation
+func UpdateResolveConversation(ctx *context.Context) {
+ origin := ctx.FormString("origin")
+ action := ctx.FormString("action")
+ commentID := ctx.FormInt64("comment_id")
+
+ comment, err := issues_model.GetCommentByID(ctx, commentID)
+ if err != nil {
+ ctx.ServerError("GetIssueByID", err)
+ return
+ }
+
+ if err = comment.LoadIssue(ctx); err != nil {
+ ctx.ServerError("comment.LoadIssue", err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("comment's repoID is incorrect", errors.New("comment's repoID is incorrect"))
+ return
+ }
+
+ var permResult bool
+ if permResult, err = issues_model.CanMarkConversation(ctx, comment.Issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ if !permResult {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Issue.IsPull {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ if action == "Resolve" || action == "UnResolve" {
+ err = issues_model.MarkConversation(ctx, comment, ctx.Doer, action == "Resolve")
+ if err != nil {
+ ctx.ServerError("MarkConversation", err)
+ return
+ }
+ } else {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ renderConversation(ctx, comment, origin)
+}
+
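+// renderConversation renders a code review conversation, either into the diff view or the issue timeline, depending on origin.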
+func renderConversation(ctx *context.Context, comment *issues_model.Comment, origin string) {
+ comments, err := issues_model.FetchCodeConversation(ctx, comment, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("FetchCodeCommentsByLine", err)
+ return
+ }
+ ctx.Data["PageIsPullFiles"] = (origin == "diff")
+
+ if err := comments.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.Data["comments"] = comments
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, comment.Issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ ctx.Data["Issue"] = comment.Issue
+ if err = comment.Issue.LoadPullRequest(ctx); err != nil {
+ ctx.ServerError("comment.Issue.LoadPullRequest", err)
+ return
+ }
+ pullHeadCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(comment.Issue.PullRequest.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+ ctx.Data["AfterCommitID"] = pullHeadCommitID
+ if origin == "diff" {
+ ctx.HTML(http.StatusOK, tplDiffConversation)
+ } else if origin == "timeline" {
+ ctx.HTML(http.StatusOK, tplTimelineConversation)
+ }
+}
+
+// SubmitReview submits the existing pending review, or creates a new one if no pending review exists
+func SubmitReview(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.SubmitReviewForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
+ reviewType := form.ReviewType()
+ switch reviewType {
+ case issues_model.ReviewTypeUnknown:
+ ctx.ServerError("ReviewType", fmt.Errorf("unknown ReviewType: %s", form.Type))
+ return
+
+ // a user cannot approve or reject their own PR
+ case issues_model.ReviewTypeApprove, issues_model.ReviewTypeReject:
+ if issue.IsPoster(ctx.Doer.ID) {
+ var translated string
+ if reviewType == issues_model.ReviewTypeApprove {
+ translated = ctx.Locale.TrString("repo.issues.review.self.approval")
+ } else {
+ translated = ctx.Locale.TrString("repo.issues.review.self.rejection")
+ }
+
+ ctx.Flash.Error(translated)
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ _, comm, err := pull_service.SubmitReview(ctx, ctx.Doer, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID, attachments)
+ if err != nil {
+ if issues_model.IsContentEmptyErr(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.review.content.empty"))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ } else {
+ ctx.ServerError("SubmitReview", err)
+ }
+ return
+ }
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d#%s", ctx.Repo.RepoLink, issue.Index, comm.HashTag()))
+}
+
+// DismissReview dismisses a stale review; only repo admins can do this
+func DismissReview(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.DismissReviewForm)
+ comm, err := pull_service.DismissReview(ctx, form.ReviewID, ctx.Repo.Repository.ID, form.Message, ctx.Doer, true, true)
+ if err != nil {
+ if pull_service.IsErrDismissRequestOnClosedPR(err) {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("pull_service.DismissReview", err)
+ return
+ }
+
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d#%s", ctx.Repo.RepoLink, comm.Issue.Index, comm.HashTag()))
+}
+
+// viewedFilesUpdate is the request body for updating the reviewed files of a PR.
+// If an API to update the review is ever implemented, this struct can simply be moved into modules.
+type viewedFilesUpdate struct {
+ Files map[string]bool `json:"files"`
+ HeadCommitSHA string `json:"headCommitSHA"`
+}
+
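+// UpdateViewedFiles updates which files of a pull request the current user has marked as viewed.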
+func UpdateViewedFiles(ctx *context.Context) {
+ // Find corresponding PR
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var data *viewedFilesUpdate
+ err := json.NewDecoder(ctx.Req.Body).Decode(&data)
+ if err != nil {
+ log.Warn("Attempted to update a review but could not parse request body: %v", err)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return
+ }
+
+ // Default to the pull request's current head commit if no head commit was supplied
+ if data.HeadCommitSHA == "" {
+ data.HeadCommitSHA = pull.HeadCommitID
+ }
+
+ updatedFiles := make(map[string]pull_model.ViewedState, len(data.Files))
+ for file, viewed := range data.Files {
+ // Only "unviewed" and "viewed" can be set from the outside; "has-changed" cannot
+ state := pull_model.Unviewed
+ if viewed {
+ state = pull_model.Viewed
+ }
+ updatedFiles[file] = state
+ }
+
+ if err := pull_model.UpdateReviewState(ctx, ctx.Doer.ID, pull.ID, data.HeadCommitSHA, updatedFiles); err != nil {
+ ctx.ServerError("UpdateReview", err)
+ }
+}
diff --git a/routers/web/repo/pull_review_test.go b/routers/web/repo/pull_review_test.go
new file mode 100644
index 0000000..329e83f
--- /dev/null
+++ b/routers/web/repo/pull_review_test.go
@@ -0,0 +1,104 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/pull"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRenderConversation(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ pr, _ := issues_model.GetPullRequestByID(db.DefaultContext, 2)
+ _ = pr.LoadIssue(db.DefaultContext)
+ _ = pr.Issue.LoadPoster(db.DefaultContext)
+ _ = pr.Issue.LoadRepo(db.DefaultContext)
+
+ run := func(name string, cb func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder)) {
+ t.Run(name, func(t *testing.T) {
+ ctx, resp := contexttest.MockContext(t, "/")
+ ctx.Render = templates.HTMLRenderer()
+ contexttest.LoadUser(t, ctx, pr.Issue.PosterID)
+ contexttest.LoadRepo(t, ctx, pr.BaseRepoID)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+ cb(t, ctx, resp)
+ })
+ }
+
+ var preparedComment *issues_model.Comment
+ run("prepare", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ comment, err := pull.CreateCodeComment(ctx, pr.Issue.Poster, ctx.Repo.GitRepo, pr.Issue, 1, "content", "", false, 0, pr.HeadCommitID, nil)
+ require.NoError(t, err)
+
+ comment.Invalidated = true
+ err = issues_model.UpdateCommentInvalidate(ctx, comment)
+ require.NoError(t, err)
+
+ preparedComment = comment
+ })
+ if !assert.NotNil(t, preparedComment) {
+ return
+ }
+ run("diff with outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "diff")
+ assert.Contains(t, resp.Body.String(), `<div class="content comment-container"`)
+ })
+ run("diff without outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = false
+ renderConversation(ctx, preparedComment, "diff")
+ // unlike Gitea, Forgejo renders the conversation (with the "outdated" label)
+ assert.Contains(t, resp.Body.String(), `repo.issues.review.outdated_description`)
+ })
+ run("timeline with outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Contains(t, resp.Body.String(), `<div id="code-comments-`)
+ })
+ run("timeline is not affected by ShowOutdatedComments=false", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = false
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Contains(t, resp.Body.String(), `<div id="code-comments-`)
+ })
+ run("diff non-existing review", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ reviews, err := issues_model.FindReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ IssueID: 2,
+ })
+ require.NoError(t, err)
+ for _, r := range reviews {
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, r))
+ }
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "diff")
+ assert.Equal(t, http.StatusOK, resp.Code)
+ assert.NotContains(t, resp.Body.String(), `status-page-500`)
+ })
+ run("timeline non-existing review", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ reviews, err := issues_model.FindReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ IssueID: 2,
+ })
+ require.NoError(t, err)
+ for _, r := range reviews {
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, r))
+ }
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Equal(t, http.StatusOK, resp.Code)
+ assert.NotContains(t, resp.Body.String(), `status-page-500`)
+ })
+}
diff --git a/routers/web/repo/recent_commits.go b/routers/web/repo/recent_commits.go
new file mode 100644
index 0000000..c158fb3
--- /dev/null
+++ b/routers/web/repo/recent_commits.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplRecentCommits base.TplName = "repo/activity"
+)
+
+// RecentCommits renders the page showing the recent commit frequency of the repository
+func RecentCommits(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.recent_commits")
+
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsRecentCommits"] = true
+ ctx.PageData["repoLink"] = ctx.Repo.RepoLink
+
+ ctx.HTML(http.StatusOK, tplRecentCommits)
+}
+
+// RecentCommitsData returns JSON of recent commits data
+func RecentCommitsData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("RecentCommitsData", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
+ }
+}
diff --git a/routers/web/repo/release.go b/routers/web/repo/release.go
new file mode 100644
index 0000000..2266deb
--- /dev/null
+++ b/routers/web/repo/release.go
@@ -0,0 +1,857 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ releaseservice "code.gitea.io/gitea/services/release"
+)
+
+const (
+ tplReleasesList base.TplName = "repo/release/list"
+ tplReleaseNew base.TplName = "repo/release/new"
+ tplTagsList base.TplName = "repo/tag/list"
+)
+
+// calReleaseNumCommitsBehind calculates how many commits the given release is behind its target branch.
+func calReleaseNumCommitsBehind(repoCtx *context.Repository, release *repo_model.Release, countCache map[string]int64) error {
+ target := release.Target
+ if target == "" {
+ target = repoCtx.Repository.DefaultBranch
+ }
+ // Get count if not cached
+ if _, ok := countCache[target]; !ok {
+ commit, err := repoCtx.GitRepo.GetBranchCommit(target)
+ if err != nil {
+ var errNotExist git.ErrNotExist
+ if target == repoCtx.Repository.DefaultBranch || !errors.As(err, &errNotExist) {
+ return fmt.Errorf("GetBranchCommit: %w", err)
+ }
+ // fallback to default branch
+ target = repoCtx.Repository.DefaultBranch
+ commit, err = repoCtx.GitRepo.GetBranchCommit(target)
+ if err != nil {
+ return fmt.Errorf("GetBranchCommit(DefaultBranch): %w", err)
+ }
+ }
+ countCache[target], err = commit.CommitsCount()
+ if err != nil {
+ return fmt.Errorf("CommitsCount: %w", err)
+ }
+ }
+ release.NumCommitsBehind = countCache[target] - release.NumCommits
+ release.TargetBehind = target
+ return nil
+}
+
+type ReleaseInfo struct {
+ Release *repo_model.Release
+ CommitStatus *git_model.CommitStatus
+ CommitStatuses []*git_model.CommitStatus
+}
+
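+// getReleaseInfos loads releases with their attachments, publishers, rendered notes and, when actions are readable, the latest commit statuses.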
+func getReleaseInfos(ctx *context.Context, opts *repo_model.FindReleasesOptions) ([]*ReleaseInfo, error) {
+ releases, err := db.Find[repo_model.Release](ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, release := range releases {
+ release.Repo = ctx.Repo.Repository
+ }
+
+ if err = repo_model.GetReleaseAttachments(ctx, releases...); err != nil {
+ return nil, err
+ }
+
+ // Temporarily cache the commit counts of the referenced branches to speed things up.
+ countCache := make(map[string]int64)
+ cacheUsers := make(map[int64]*user_model.User)
+ if ctx.Doer != nil {
+ cacheUsers[ctx.Doer.ID] = ctx.Doer
+ }
+ var ok bool
+
+ canReadActions := ctx.Repo.CanRead(unit.TypeActions)
+
+ releaseInfos := make([]*ReleaseInfo, 0, len(releases))
+ for _, r := range releases {
+ if r.Publisher, ok = cacheUsers[r.PublisherID]; !ok {
+ r.Publisher, err = user_model.GetUserByID(ctx, r.PublisherID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ r.Publisher = user_model.NewGhostUser()
+ } else {
+ return nil, err
+ }
+ }
+ cacheUsers[r.PublisherID] = r.Publisher
+ }
+
+ r.RenderedNote, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, r.Note)
+ if err != nil {
+ return nil, err
+ }
+
+ err = r.LoadArchiveDownloadCount(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ if !r.IsDraft {
+ if err := calReleaseNumCommitsBehind(ctx.Repo, r, countCache); err != nil {
+ return nil, err
+ }
+ }
+
+ info := &ReleaseInfo{
+ Release: r,
+ }
+
+ if canReadActions {
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, r.Repo.ID, r.Sha1, db.ListOptionsAll)
+ if err != nil {
+ return nil, err
+ }
+
+ info.CommitStatus = git_model.CalcCommitStatus(statuses)
+ info.CommitStatuses = statuses
+ }
+
+ releaseInfos = append(releaseInfos, info)
+ }
+
+ return releaseInfos, nil
+}
+
+// Releases renders the releases list page
+func Releases(ctx *context.Context) {
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.release.releases")
+ ctx.Data["IsViewBranch"] = false
+ ctx.Data["IsViewTag"] = true
+ // Disable the showCreateNewBranch form in the dropdown on this page.
+ ctx.Data["CanCreateBranch"] = false
+ ctx.Data["HideBranchesInDropdown"] = true
+
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: ctx.FormInt("limit"),
+ }
+ if listOptions.PageSize == 0 {
+ listOptions.PageSize = setting.Repository.Release.DefaultPagingNum
+ }
+ if listOptions.PageSize > setting.API.MaxResponseItems {
+ listOptions.PageSize = setting.API.MaxResponseItems
+ }
+
+ writeAccess := ctx.Repo.CanWrite(unit.TypeReleases)
+ ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
+
+ releases, err := getReleaseInfos(ctx, &repo_model.FindReleasesOptions{
+ ListOptions: listOptions,
+ // only show draft releases to users who can write; read-only users shouldn't see drafts.
+ IncludeDrafts: writeAccess,
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("getReleaseInfos", err)
+ return
+ }
+ for _, rel := range releases {
+ if rel.Release.IsTag && rel.Release.Title == "" {
+ rel.Release.Title = rel.Release.TagName
+ }
+ }
+
+ ctx.Data["Releases"] = releases
+ addVerifyTagToContext(ctx)
+
+ numReleases := ctx.Data["NumReleases"].(int64)
+ pager := context.NewPagination(int(numReleases), listOptions.PageSize, listOptions.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplReleasesList)
+}
+
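+// verifyTagSignature loads the annotated tag behind a release and returns its signature verification, or nil if the tag is unsigned.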
+func verifyTagSignature(ctx *context.Context, r *repo_model.Release) (*asymkey.ObjectVerification, error) {
+ if err := r.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+ gitRepo, err := gitrepo.OpenRepository(ctx, r.Repo)
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ tag, err := gitRepo.GetTag(r.TagName)
+ if err != nil {
+ return nil, err
+ }
+ if tag.Signature == nil {
+ return nil, nil
+ }
+
+ verification := asymkey.ParseTagWithSignature(ctx, gitRepo, tag)
+ return verification, nil
+}
+
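+// addVerifyTagToContext registers template helpers for verifying and displaying tag signatures.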
+func addVerifyTagToContext(ctx *context.Context) {
+ ctx.Data["VerifyTag"] = func(r *repo_model.Release) *asymkey.ObjectVerification {
+ v, err := verifyTagSignature(ctx, r)
+ if err != nil {
+ return nil
+ }
+ return v
+ }
+ ctx.Data["HasSignature"] = func(verification *asymkey.ObjectVerification) bool {
+ if verification == nil {
+ return false
+ }
+ return verification.Reason != "gpg.error.not_signed_commit"
+ }
+}
+
+// TagsList renders the tags list page
+func TagsList(ctx *context.Context) {
+ ctx.Data["PageIsTagList"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.release.tags")
+ ctx.Data["IsViewBranch"] = false
+ ctx.Data["IsViewTag"] = true
+ // Disable the showCreateNewBranch form in the dropdown on this page.
+ ctx.Data["CanCreateBranch"] = false
+ ctx.Data["HideBranchesInDropdown"] = true
+ ctx.Data["CanCreateRelease"] = ctx.Repo.CanWrite(unit.TypeReleases) && !ctx.Repo.Repository.IsArchived
+
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: ctx.FormInt("limit"),
+ }
+ if listOptions.PageSize == 0 {
+ listOptions.PageSize = setting.Repository.Release.DefaultPagingNum
+ }
+ if listOptions.PageSize > setting.API.MaxResponseItems {
+ listOptions.PageSize = setting.API.MaxResponseItems
+ }
+
+ opts := repo_model.FindReleasesOptions{
+ ListOptions: listOptions,
+ // for the tags list page, show all releases that have a real tag (i.e. a real commit id);
+ // drafts are included too because a real tag might be attached to a draft.
+ IncludeDrafts: true,
+ IncludeTags: true,
+ HasSha1: optional.Some(true),
+ RepoID: ctx.Repo.Repository.ID,
+ }
+
+ releases, err := db.Find[repo_model.Release](ctx, opts)
+ if err != nil {
+ ctx.ServerError("GetReleasesByRepoID", err)
+ return
+ }
+
+ ctx.Data["Releases"] = releases
+ addVerifyTagToContext(ctx)
+
+ numTags := ctx.Data["NumTags"].(int64)
+ pager := context.NewPagination(int(numTags), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.Data["PageIsViewCode"] = !ctx.Repo.Repository.UnitEnabled(ctx, unit.TypeReleases)
+ ctx.HTML(http.StatusOK, tplTagsList)
+}
+
+// ReleasesFeedRSS gets the releases feed in RSS format
+func ReleasesFeedRSS(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, true, "rss")
+}
+
+// TagsListFeedRSS gets the tags feed in RSS format
+func TagsListFeedRSS(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, false, "rss")
+}
+
+// ReleasesFeedAtom gets the releases feed in Atom format
+func ReleasesFeedAtom(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, true, "atom")
+}
+
+// TagsListFeedAtom gets the tags feed in Atom format
+func TagsListFeedAtom(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, false, "atom")
+}
+
+func releasesOrTagsFeed(ctx *context.Context, isReleasesOnly bool, formatType string) {
+ feed.ShowReleaseFeed(ctx, ctx.Repo.Repository, isReleasesOnly, formatType)
+}
+
+// SingleRelease renders a single release's page
+func SingleRelease(ctx *context.Context) {
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["DefaultBranch"] = ctx.Repo.Repository.DefaultBranch
+
+ writeAccess := ctx.Repo.CanWrite(unit.TypeReleases)
+ ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
+
+ releases, err := getReleaseInfos(ctx, &repo_model.FindReleasesOptions{
+ ListOptions: db.ListOptions{Page: 1, PageSize: 1},
+ RepoID: ctx.Repo.Repository.ID,
+ // Include tags in the search too.
+ IncludeTags: true,
+ TagNames: []string{ctx.Params("*")},
+ // only show draft releases to users who can write; read-only users shouldn't see drafts.
+ IncludeDrafts: writeAccess,
+ })
+ if err != nil {
+ ctx.ServerError("getReleaseInfos", err)
+ return
+ }
+ if len(releases) != 1 {
+ ctx.NotFound("SingleRelease", err)
+ return
+ }
+
+ release := releases[0].Release
+ if release.IsTag && release.Title == "" {
+ release.Title = release.TagName
+ }
+ addVerifyTagToContext(ctx)
+
+ ctx.Data["PageIsSingleTag"] = release.IsTag
+ if release.IsTag {
+ ctx.Data["Title"] = release.TagName
+ } else {
+ ctx.Data["Title"] = release.Title
+ }
+
+ err = release.LoadArchiveDownloadCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadArchiveDownloadCount", err)
+ return
+ }
+
+ ctx.Data["Releases"] = releases
+ ctx.HTML(http.StatusOK, tplReleasesList)
+}
+
+// LatestRelease redirects to the latest release
+func LatestRelease(ctx *context.Context) {
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("LatestRelease", err)
+ return
+ }
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ return
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ ctx.Redirect(release.Link())
+}
+
+// NewRelease renders the page for creating a release, pre-filled from an existing tag if one is given
+func NewRelease(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.release.new_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["tag_target"] = ctx.Repo.Repository.DefaultBranch
+ if tagName := ctx.FormString("tag"); len(tagName) > 0 {
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil && !repo_model.IsErrReleaseNotExist(err) {
+ ctx.ServerError("GetRelease", err)
+ return
+ }
+
+ if rel != nil {
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ ctx.Data["tag_name"] = rel.TagName
+ if rel.Target != "" {
+ ctx.Data["tag_target"] = rel.Target
+ }
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["attachments"] = rel.Attachments
+ }
+ }
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ upload.AddUploadContext(ctx, "release")
+
+ // For New Release page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ // Pre-fill the hide_archive_links value based on the latest release
+ latestRelease, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.Data["hide_archive_links"] = false
+ } else {
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ return
+ }
+ }
+ if latestRelease != nil {
+ ctx.Data["hide_archive_links"] = latestRelease.HideArchiveLinks
+ }
+
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+}
+
+// NewReleasePost handles the form submission for creating a release
+func NewReleasePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewReleaseForm)
+ ctx.Data["Title"] = ctx.Tr("repo.release.new_release")
+ ctx.Data["PageIsReleaseList"] = true
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+ return
+ }
+
+ objectFormat, _ := ctx.Repo.GitRepo.GetObjectFormat()
+
+ // form.Target can be a branch name or a full commitID.
+ if !ctx.Repo.GitRepo.IsBranchExist(form.Target) &&
+ len(form.Target) == objectFormat.FullLength() && !ctx.Repo.GitRepo.IsCommitExist(form.Target) {
+ ctx.RenderWithErr(ctx.Tr("form.target_branch_not_exist"), tplReleaseNew, &form)
+ return
+ }
+
+ // Title of release cannot be empty
+ if len(form.TagOnly) == 0 && len(form.Title) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.release.title_empty"), tplReleaseNew, &form)
+ return
+ }
+
+ attachmentChanges := make(container.Set[*releaseservice.AttachmentChange])
+ attachmentChangesByID := make(map[string]*releaseservice.AttachmentChange)
+
+ if setting.Attachment.Enabled {
+ for _, uuid := range form.Files {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "attachment",
+ UUID: uuid,
+ })
+ }
+
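+ // External attachments are submitted as paired form fields "attachment-new-name-<id>" and "attachment-new-exturl-<id>"; group them by id.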
+ const namePrefix = "attachment-new-name-"
+ const exturlPrefix = "attachment-new-exturl-"
+ for k, v := range ctx.Req.Form {
+ isNewName := strings.HasPrefix(k, namePrefix)
+ isNewExturl := strings.HasPrefix(k, exturlPrefix)
+ if isNewName || isNewExturl {
+ var id string
+ if isNewName {
+ id = k[len(namePrefix):]
+ } else if isNewExturl {
+ id = k[len(exturlPrefix):]
+ }
+ if _, ok := attachmentChangesByID[id]; !ok {
+ attachmentChangesByID[id] = &releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "external",
+ }
+ attachmentChanges.Add(attachmentChangesByID[id])
+ }
+ if isNewName {
+ attachmentChangesByID[id].Name = v[0]
+ } else if isNewExturl {
+ attachmentChangesByID[id].ExternalURL = v[0]
+ }
+ }
+ }
+ }
+
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, form.TagName)
+ if err != nil {
+ if !repo_model.IsErrReleaseNotExist(err) {
+ ctx.ServerError("GetRelease", err)
+ return
+ }
+
+ msg := ""
+ if len(form.Title) > 0 && form.AddTagMsg {
+ msg = form.Title + "\n\n" + form.Content
+ }
+
+ if len(form.TagOnly) > 0 {
+ if err = releaseservice.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, form.Target, form.TagName, msg); err != nil {
+ if models.IsErrTagAlreadyExists(err) {
+ e := err.(models.ErrTagAlreadyExists)
+ ctx.Flash.Error(ctx.Tr("repo.branch.tag_collision", e.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrInvalidTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_invalid"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ ctx.ServerError("releaseservice.CreateNewTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.tag.create_success", form.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/tag/" + util.PathEscapeSegments(form.TagName))
+ return
+ }
+
+ rel = &repo_model.Release{
+ RepoID: ctx.Repo.Repository.ID,
+ Repo: ctx.Repo.Repository,
+ PublisherID: ctx.Doer.ID,
+ Publisher: ctx.Doer,
+ Title: form.Title,
+ TagName: form.TagName,
+ Target: form.Target,
+ Note: form.Content,
+ IsDraft: len(form.Draft) > 0,
+ IsPrerelease: form.Prerelease,
+ HideArchiveLinks: form.HideArchiveLinks,
+ IsTag: false,
+ }
+
+ if err = releaseservice.CreateRelease(ctx.Repo.GitRepo, rel, msg, attachmentChanges.Values()); err != nil {
+ ctx.Data["Err_TagName"] = true
+ switch {
+ case repo_model.IsErrReleaseAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_already_exist"), tplReleaseNew, &form)
+ case models.IsErrInvalidTagName(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_invalid"), tplReleaseNew, &form)
+ case models.IsErrProtectedTagName(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_protected"), tplReleaseNew, &form)
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("CreateRelease", err)
+ }
+ return
+ }
+ } else {
+ if !rel.IsTag {
+ ctx.Data["Err_TagName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_already_exist"), tplReleaseNew, &form)
+ return
+ }
+
+ rel.Title = form.Title
+ rel.Note = form.Content
+ rel.Target = form.Target
+ rel.IsDraft = len(form.Draft) > 0
+ rel.IsPrerelease = form.Prerelease
+ rel.PublisherID = ctx.Doer.ID
+ rel.HideArchiveLinks = form.HideArchiveLinks
+ rel.IsTag = false
+
+ if err = releaseservice.UpdateRelease(ctx, ctx.Doer, ctx.Repo.GitRepo, rel, true, attachmentChanges.Values()); err != nil {
+ ctx.Data["Err_TagName"] = true
+ switch {
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("UpdateRelease", err)
+ }
+ return
+ }
+ }
+ log.Trace("Release created: %s/%s:%s", ctx.Doer.LowerName, ctx.Repo.Repository.Name, form.TagName)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/releases")
+}
+
+// EditRelease renders the release edit page
+func EditRelease(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.release.edit_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["PageIsEditRelease"] = true
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "release")
+
+ tagName := ctx.Params("*")
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetRelease", err)
+ } else {
+ ctx.ServerError("GetRelease", err)
+ }
+ return
+ }
+ ctx.Data["ID"] = rel.ID
+ ctx.Data["tag_name"] = rel.TagName
+ ctx.Data["tag_target"] = rel.Target
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["prerelease"] = rel.IsPrerelease
+ ctx.Data["hide_archive_links"] = rel.HideArchiveLinks
+ ctx.Data["IsDraft"] = rel.IsDraft
+
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Data["attachments"] = rel.Attachments
+
+ // Get assignees.
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, rel.Repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+}
+
+// EditReleasePost handles the release edit form submission
+func EditReleasePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditReleaseForm)
+ ctx.Data["Title"] = ctx.Tr("repo.release.edit_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["PageIsEditRelease"] = true
+
+ tagName := ctx.Params("*")
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetRelease", err)
+ } else {
+ ctx.ServerError("GetRelease", err)
+ }
+ return
+ }
+ if rel.IsTag {
+ ctx.NotFound("GetRelease", err)
+ return
+ }
+ ctx.Data["tag_name"] = rel.TagName
+ ctx.Data["tag_target"] = rel.Target
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["prerelease"] = rel.IsPrerelease
+ ctx.Data["hide_archive_links"] = rel.HideArchiveLinks
+
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ // TODO: If an error occurs, do not forget the attachment edits the user made
+ // when displaying the error message.
+ ctx.Data["attachments"] = rel.Attachments
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+ return
+ }
+
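+ // Attachment edits arrive as prefixed form fields: deletions, name/exturl updates for existing attachments, and name/exturl pairs for new external attachments.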
+ const delPrefix = "attachment-del-"
+ const editPrefix = "attachment-edit-"
+ const newPrefix = "attachment-new-"
+ const namePrefix = "name-"
+ const exturlPrefix = "exturl-"
+ attachmentChanges := make(container.Set[*releaseservice.AttachmentChange])
+ attachmentChangesByID := make(map[string]*releaseservice.AttachmentChange)
+
+ if setting.Attachment.Enabled {
+ for _, uuid := range form.Files {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "attachment",
+ UUID: uuid,
+ })
+ }
+
+ for k, v := range ctx.Req.Form {
+ if strings.HasPrefix(k, delPrefix) && v[0] == "true" {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "delete",
+ UUID: k[len(delPrefix):],
+ })
+ } else {
+ isUpdatedName := strings.HasPrefix(k, editPrefix+namePrefix)
+ isUpdatedExturl := strings.HasPrefix(k, editPrefix+exturlPrefix)
+ isNewName := strings.HasPrefix(k, newPrefix+namePrefix)
+ isNewExturl := strings.HasPrefix(k, newPrefix+exturlPrefix)
+
+ if isUpdatedName || isUpdatedExturl || isNewName || isNewExturl {
+ var uuid string
+
+ if isUpdatedName {
+ uuid = k[len(editPrefix+namePrefix):]
+ } else if isUpdatedExturl {
+ uuid = k[len(editPrefix+exturlPrefix):]
+ } else if isNewName {
+ uuid = k[len(newPrefix+namePrefix):]
+ } else if isNewExturl {
+ uuid = k[len(newPrefix+exturlPrefix):]
+ }
+
+ if _, ok := attachmentChangesByID[uuid]; !ok {
+ attachmentChangesByID[uuid] = &releaseservice.AttachmentChange{
+ Type: "attachment",
+ UUID: uuid,
+ }
+ attachmentChanges.Add(attachmentChangesByID[uuid])
+ }
+
+ if isUpdatedName || isUpdatedExturl {
+ attachmentChangesByID[uuid].Action = "update"
+ } else if isNewName || isNewExturl {
+ attachmentChangesByID[uuid].Action = "add"
+ }
+
+ if isUpdatedName || isNewName {
+ attachmentChangesByID[uuid].Name = v[0]
+ } else if isUpdatedExturl || isNewExturl {
+ attachmentChangesByID[uuid].ExternalURL = v[0]
+ attachmentChangesByID[uuid].Type = "external"
+ }
+ }
+ }
+ }
+ }
+
+ rel.Title = form.Title
+ rel.Note = form.Content
+ rel.IsDraft = len(form.Draft) > 0
+ rel.IsPrerelease = form.Prerelease
+ rel.HideArchiveLinks = form.HideArchiveLinks
+ if err = releaseservice.UpdateRelease(ctx, ctx.Doer, ctx.Repo.GitRepo, rel, false, attachmentChanges.Values()); err != nil {
+ switch {
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("UpdateRelease", err)
+ }
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/releases")
+}
+
+// DeleteRelease deletes a release
+func DeleteRelease(ctx *context.Context) {
+ deleteReleaseOrTag(ctx, false)
+}
+
+// DeleteTag deletes a tag
+func DeleteTag(ctx *context.Context) {
+ deleteReleaseOrTag(ctx, true)
+}
+
+func deleteReleaseOrTag(ctx *context.Context, isDelTag bool) {
+ redirect := func() {
+ if isDelTag {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/tags")
+ return
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/releases")
+ }
+
+ rel, err := repo_model.GetReleaseForRepoByID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id"))
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetReleaseForRepoByID", err)
+ } else {
+ ctx.Flash.Error("DeleteReleaseByID: " + err.Error())
+ redirect()
+ }
+ return
+ }
+
+ if err := releaseservice.DeleteReleaseByID(ctx, ctx.Repo.Repository, rel, ctx.Doer, isDelTag); err != nil {
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ } else {
+ ctx.Flash.Error("DeleteReleaseByID: " + err.Error())
+ }
+ } else {
+ if isDelTag {
+ ctx.Flash.Success(ctx.Tr("repo.release.deletion_tag_success"))
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.release.deletion_success"))
+ }
+ }
+
+ redirect()
+}
diff --git a/routers/web/repo/release_test.go b/routers/web/repo/release_test.go
new file mode 100644
index 0000000..5c7b6e2
--- /dev/null
+++ b/routers/web/repo/release_test.go
@@ -0,0 +1,124 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewReleasePost(t *testing.T) {
+ for _, testCase := range []struct {
+ RepoID int64
+ UserID int64
+ TagName string
+ Form forms.NewReleaseForm
+ }{
+ {
+ RepoID: 1,
+ UserID: 2,
+ TagName: "v1.1", // pre-existing tag
+ Form: forms.NewReleaseForm{
+ TagName: "newtag",
+ Target: "master",
+ Title: "title",
+ Content: "content",
+ },
+ },
+ {
+ RepoID: 1,
+ UserID: 2,
+ TagName: "newtag",
+ Form: forms.NewReleaseForm{
+ TagName: "newtag",
+ Target: "master",
+ Title: "title",
+ Content: "content",
+ },
+ },
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/releases/new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ web.SetForm(ctx, &testCase.Form)
+ NewReleasePost(ctx)
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Release{
+ RepoID: 1,
+ PublisherID: 2,
+ TagName: testCase.Form.TagName,
+ Target: testCase.Form.Target,
+ Title: testCase.Form.Title,
+ Note: testCase.Form.Content,
+ }, unittest.Cond("is_draft=?", len(testCase.Form.Draft) > 0))
+ ctx.Repo.GitRepo.Close()
+ }
+}
+
+func TestCalReleaseNumCommitsBehind(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo-release/releases")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 57)
+ contexttest.LoadGitRepo(t, ctx)
+ t.Cleanup(func() { ctx.Repo.GitRepo.Close() })
+
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: ctx.Repo.CanWrite(unit.TypeReleases),
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ require.NoError(t, err)
+
+ countCache := make(map[string]int64)
+ for _, release := range releases {
+ err := calReleaseNumCommitsBehind(ctx.Repo, release, countCache)
+ require.NoError(t, err)
+ }
+
+ type computedFields struct {
+ NumCommitsBehind int64
+ TargetBehind string
+ }
+ expectedComputation := map[string]computedFields{
+ "v1.0": {
+ NumCommitsBehind: 3,
+ TargetBehind: "main",
+ },
+ "v1.1": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ "v2.0": {
+ NumCommitsBehind: 0,
+ TargetBehind: "main",
+ },
+ "non-existing-target-branch": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ "empty-target-branch": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ }
+ for _, r := range releases {
+ actual := computedFields{
+ NumCommitsBehind: r.NumCommitsBehind,
+ TargetBehind: r.TargetBehind,
+ }
+ assert.Equal(t, expectedComputation[r.TagName], actual, "wrong computed fields for %s: %#v", r.TagName, r)
+ }
+}
diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go
new file mode 100644
index 0000000..e64db03
--- /dev/null
+++ b/routers/web/repo/render.go
@@ -0,0 +1,76 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "io"
+ "net/http"
+ "path"
+
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+// RenderFile renders a file identified by its path in the repository
+func RenderFile(ctx *context.Context) {
+ blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlobByPath", err)
+ } else {
+ ctx.ServerError("GetBlobByPath", err)
+ }
+ return
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ ctx.ServerError("DataAsync", err)
+ return
+ }
+ defer dataRc.Close()
+
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ isTextFile := st.IsText()
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+ ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'; sandbox allow-scripts")
+
+ if markupType := markup.Type(blob.Name()); markupType == "" {
+ if isTextFile {
+ _, _ = io.Copy(ctx.Resp, rd)
+ } else {
+ http.Error(ctx.Resp, "Unsupported file type render", http.StatusInternalServerError)
+ }
+ return
+ }
+
+ err = markup.Render(&markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ InStandalonePage: true,
+ }, rd, ctx.Resp)
+ if err != nil {
+ log.Error("Failed to render file %q: %v", ctx.Repo.TreePath, err)
+ http.Error(ctx.Resp, "Failed to render file", http.StatusInternalServerError)
+ return
+ }
+}
diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go
new file mode 100644
index 0000000..9562491
--- /dev/null
+++ b/routers/web/repo/repo.go
@@ -0,0 +1,774 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "slices"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+ archiver_service "code.gitea.io/gitea/services/repository/archiver"
+ commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
+)
+
+const (
+ tplCreate base.TplName = "repo/create"
+ tplAlertDetails base.TplName = "base/alert_details"
+)
+
+// MustBeNotEmpty responds with 404 when the repository is an empty git dir
+func MustBeNotEmpty(ctx *context.Context) {
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.NotFound("MustBeNotEmpty", nil)
+ }
+}
+
+// MustBeEditable checks that the repository can be edited
+func MustBeEditable(ctx *context.Context) {
+ if !ctx.Repo.Repository.CanEnableEditor() || ctx.Repo.IsViewCommit {
+ ctx.NotFound("", nil)
+ return
+ }
+}
+
+// MustBeAbleToUpload checks that uploads to the repository are enabled
+func MustBeAbleToUpload(ctx *context.Context) {
+ if !setting.Repository.Upload.Enabled {
+ ctx.NotFound("", nil)
+ }
+}
+
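+// CommitInfoCache loads the default branch commit and commit count and prepares the last-commit cache for them.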
+func CommitInfoCache(ctx *context.Context) {
+ var err error
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("GetBranchCommit", err)
+ return
+ }
+ ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+ ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
+ ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache())
+}
+
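+// checkContextUser resolves the owner for a new repository: the doer themselves, or an organization the doer is allowed to create repositories in.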
+func checkContextUser(ctx *context.Context, uid int64) *user_model.User {
+ orgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetOrgsCanCreateRepoByUserID", err)
+ return nil
+ }
+
+ if !ctx.Doer.IsAdmin {
+ orgsAvailable := []*organization.Organization{}
+ for i := 0; i < len(orgs); i++ {
+ if orgs[i].CanCreateRepo() {
+ orgsAvailable = append(orgsAvailable, orgs[i])
+ }
+ }
+ ctx.Data["Orgs"] = orgsAvailable
+ } else {
+ ctx.Data["Orgs"] = orgs
+ }
+
+ // A uid of 0 or the doer's own ID means the doer; any other uid refers to an organization.
+ if uid == ctx.Doer.ID || uid == 0 {
+ return ctx.Doer
+ }
+
+ org, err := user_model.GetUserByID(ctx, uid)
+ if user_model.IsErrUserNotExist(err) {
+ return ctx.Doer
+ }
+
+ if err != nil {
+ ctx.ServerError("GetUserByID", fmt.Errorf("[%d]: %w", uid, err))
+ return nil
+ }
+
+ // Check ownership of organization.
+ if !org.IsOrganization() {
+ ctx.Error(http.StatusForbidden)
+ return nil
+ }
+ if !ctx.Doer.IsAdmin {
+ canCreate, err := organization.OrgFromUser(org).CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return nil
+ } else if !canCreate {
+ ctx.Error(http.StatusForbidden)
+ return nil
+ }
+ } else {
+ ctx.Data["Orgs"] = orgs
+ }
+ return org
+}
+
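+// getRepoPrivate resolves the default visibility for a new repository from the instance setting, falling back to the user's last choice.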
+func getRepoPrivate(ctx *context.Context) bool {
+ switch strings.ToLower(setting.Repository.DefaultPrivate) {
+ case setting.RepoCreatingLastUserVisibility:
+ return ctx.Doer.LastRepoVisibility
+ case setting.RepoCreatingPrivate:
+ return true
+ case setting.RepoCreatingPublic:
+ return false
+ default:
+ return ctx.Doer.LastRepoVisibility
+ }
+}
+
+// Create renders the repository creation page
+func Create(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("new_repo.title")
+
+ // Give default value for template to render.
+ ctx.Data["Gitignores"] = repo_module.Gitignores
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.Data["Licenses"] = repo_module.Licenses
+ ctx.Data["Readmes"] = repo_module.Readmes
+ ctx.Data["readme"] = "Default"
+ ctx.Data["private"] = getRepoPrivate(ctx)
+ ctx.Data["IsForcedPrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["default_branch"] = setting.Repository.DefaultBranch
+
+ ctxUser := checkContextUser(ctx, ctx.FormInt64("org"))
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ ctx.Data["repo_template_name"] = ctx.Tr("repo.template_select")
+ templateID := ctx.FormInt64("template_id")
+ if templateID > 0 {
+ templateRepo, err := repo_model.GetRepositoryByID(ctx, templateID)
+ if err == nil && access_model.CheckRepoUnitUser(ctx, templateRepo, ctxUser, unit.TypeCode) {
+ ctx.Data["repo_template"] = templateID
+ ctx.Data["repo_template_name"] = templateRepo.Name
+ }
+ }
+
+ ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo()
+ ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit()
+ ctx.Data["SupportedObjectFormats"] = git.SupportedObjectFormats
+ ctx.Data["DefaultObjectFormat"] = git.Sha1ObjectFormat
+
+ ctx.HTML(http.StatusOK, tplCreate)
+}
+
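+// handleCreateError maps repository creation errors to user-facing form errors.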
+func handleCreateError(ctx *context.Context, owner *user_model.User, err error, name string, tpl base.TplName, form any) {
+ switch {
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tpl, form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tpl, form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tpl, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tpl, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tpl, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tpl, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tpl, form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form)
+ default:
+ ctx.ServerError(name, err)
+ }
+}
+
+// CreatePost handles the repository creation form submission
+func CreatePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateRepoForm)
+ ctx.Data["Title"] = ctx.Tr("new_repo.title")
+
+ ctx.Data["Gitignores"] = repo_module.Gitignores
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.Data["Licenses"] = repo_module.Licenses
+ ctx.Data["Readmes"] = repo_module.Readmes
+
+ ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo()
+ ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit()
+ ctx.Data["SupportedObjectFormats"] = git.SupportedObjectFormats
+ ctx.Data["DefaultObjectFormat"] = git.Sha1ObjectFormat
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplCreate)
+ return
+ }
+
+ var repo *repo_model.Repository
+ var err error
+ if form.RepoTemplate > 0 {
+ opts := repo_service.GenerateRepoOptions{
+ Name: form.RepoName,
+ Description: form.Description,
+ Private: form.Private || setting.Repository.ForcePrivate,
+ GitContent: form.GitContent,
+ Topics: form.Topics,
+ GitHooks: form.GitHooks,
+ Webhooks: form.Webhooks,
+ Avatar: form.Avatar,
+ IssueLabels: form.Labels,
+ ProtectedBranch: form.ProtectedBranch,
+ }
+
+ if !opts.IsValid() {
+ ctx.RenderWithErr(ctx.Tr("repo.template.one_item"), tplCreate, form)
+ return
+ }
+
+ templateRepo := getRepository(ctx, form.RepoTemplate)
+ if ctx.Written() {
+ return
+ }
+
+ if !templateRepo.IsTemplate {
+ ctx.RenderWithErr(ctx.Tr("repo.template.invalid"), tplCreate, form)
+ return
+ }
+
+ repo, err = repo_service.GenerateRepository(ctx, ctx.Doer, ctxUser, templateRepo, opts)
+ if err == nil {
+ log.Trace("Repository generated [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(repo.Link())
+ return
+ }
+ } else {
+ repo, err = repo_service.CreateRepository(ctx, ctx.Doer, ctxUser, repo_service.CreateRepoOptions{
+ Name: form.RepoName,
+ Description: form.Description,
+ Gitignores: form.Gitignores,
+ IssueLabels: form.IssueLabels,
+ License: form.License,
+ Readme: form.Readme,
+ IsPrivate: form.Private || setting.Repository.ForcePrivate,
+ DefaultBranch: form.DefaultBranch,
+ AutoInit: form.AutoInit,
+ IsTemplate: form.Template,
+ TrustModel: repo_model.DefaultTrustModel,
+ ObjectFormatName: form.ObjectFormatName,
+ })
+ if err == nil {
+ log.Trace("Repository created [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(repo.Link())
+ return
+ }
+ }
+
+ handleCreateError(ctx, ctxUser, err, "CreatePost", tplCreate, &form)
+}
+
+const (
+ tplWatchUnwatch base.TplName = "repo/watch_unwatch"
+ tplStarUnstar base.TplName = "repo/star_unstar"
+)
+
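+// ActionWatch returns a handler that watches or unwatches the repository for the current user.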
+func ActionWatch(watch bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ err := repo_model.WatchRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID, watch)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (watch, %t)", watch), err)
+ return
+ }
+
+ ctx.Data["IsWatchingRepo"] = repo_model.IsWatching(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+
+ // we have to reload the repository because NumStars or NumWatching (used in the templates) has just changed
+ ctx.Data["Repository"], err = repo_model.GetRepositoryByName(ctx, ctx.Repo.Repository.OwnerID, ctx.Repo.Repository.Name)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (watch, %t)", watch), err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplWatchUnwatch)
+ }
+}
+
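+// ActionStar returns a handler that stars or unstars the repository for the current user.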
+func ActionStar(star bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ err := repo_service.StarRepoAndSendLikeActivities(ctx, *ctx.Doer, ctx.Repo.Repository.ID, star)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (star, %t)", star), err)
+ return
+ }
+
+ ctx.Data["IsStaringRepo"] = repo_model.IsStaring(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+
+ // we have to reload the repository because NumStars or NumWatching (used in the templates) has just changed
+ ctx.Data["Repository"], err = repo_model.GetRepositoryByName(ctx, ctx.Repo.Repository.OwnerID, ctx.Repo.Repository.Name)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (star, %t)", star), err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplStarUnstar)
+ }
+}
+
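+// ActionTransfer returns a handler that accepts or rejects a pending repository transfer.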
+func ActionTransfer(accept bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ var action string
+ if accept {
+ action = "accept_transfer"
+ } else {
+ action = "reject_transfer"
+ }
+
+ ok, err := acceptOrRejectRepoTransfer(ctx, accept)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (%s)", action), err)
+ return
+ }
+ if !ok {
+ return
+ }
+
+ ctx.RedirectToFirst(ctx.FormString("redirect_to"), ctx.Repo.RepoLink)
+ }
+}
+
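+// acceptOrRejectRepoTransfer processes a pending repository transfer; it returns false with no error when a response has already been written (e.g. a failed quota check).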
+func acceptOrRejectRepoTransfer(ctx *context.Context, accept bool) (bool, error) {
+ repoTransfer, err := models.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
+ if err != nil {
+ return false, err
+ }
+
+ if err := repoTransfer.LoadAttributes(ctx); err != nil {
+ return false, err
+ }
+
+ if !repoTransfer.CanUserAcceptTransfer(ctx, ctx.Doer) {
+ return false, errors.New("user does not have enough permissions")
+ }
+
+ if accept {
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctx.Doer.ID, ctx.Doer.Name) {
+ return false, nil
+ }
+
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+
+ if err := repo_service.TransferOwnership(ctx, repoTransfer.Doer, repoTransfer.Recipient, ctx.Repo.Repository, repoTransfer.Teams); err != nil {
+ return false, err
+ }
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer.success"))
+ } else {
+ if err := repo_service.CancelRepositoryTransfer(ctx, ctx.Repo.Repository); err != nil {
+ return false, err
+ }
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer.rejected"))
+ }
+
+ ctx.Redirect(ctx.Repo.Repository.Link())
+ return true, nil
+}
+
+// RedirectDownload serves a release attachment identified by tag and file name
+func RedirectDownload(ctx *context.Context) {
+ var (
+ vTag = ctx.Params("vTag")
+ fileName = ctx.Params("fileName")
+ )
+ tagNames := []string{vTag}
+ curRepo := ctx.Repo.Repository
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: ctx.Repo.CanWrite(unit.TypeReleases),
+ RepoID: curRepo.ID,
+ TagNames: tagNames,
+ })
+ if err != nil {
+ ctx.ServerError("RedirectDownload", err)
+ return
+ }
+ if len(releases) == 1 {
+ release := releases[0]
+ att, err := repo_model.GetAttachmentByReleaseIDFileName(ctx, release.ID, fileName)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if att != nil {
+ ServeAttachment(ctx, att.UUID)
+ return
+ }
+ } else if len(releases) == 0 && vTag == "latest" {
+ // GitHub supports the alias "latest" for the latest release
+ // We only fetch the latest release if the tag is "latest" and no release with the tag "latest" exists
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ att, err := repo_model.GetAttachmentByReleaseIDFileName(ctx, release.ID, fileName)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if att != nil {
+ ServeAttachment(ctx, att.UUID)
+ return
+ }
+ }
+ ctx.Error(http.StatusNotFound)
+}
+
+// Download an archive of a repository
+func Download(ctx *context.Context) {
+ uri := ctx.Params("*")
+ aReq, err := archiver_service.NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri)
+ if err != nil {
+ if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ } else {
+ ctx.ServerError("archiver_service.NewRequest", err)
+ }
+ return
+ }
+
+ archiver, err := aReq.Await(ctx)
+ if err != nil {
+ ctx.ServerError("archiver.Await", err)
+ return
+ }
+
+ download(ctx, aReq.GetArchiveName(), archiver)
+}
+
+func download(ctx *context.Context, archiveName string, archiver *repo_model.RepoArchiver) {
+ downloadName := ctx.Repo.Repository.Name + "-" + archiveName
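+	// e.g. an (illustrative) archive name "main.tar.gz" for a repository named "myrepo" yields the download name "myrepo-main.tar.gz"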
+
+ // Add nix format link header so tarballs lock correctly:
+ // https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+ ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+ ctx.Repo.Repository.APIURL(),
+ archiver.CommitID, archiver.CommitID))
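+	// The resulting header has the (illustrative) form:
+	//   Link: <https://example.com/api/v1/repos/owner/repo/archive/<commit>.tar.gz?rev=<commit>>; rel="immutable"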
+
+ rPath := archiver.RelativePath()
+ if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.RepoArchives.URL(rPath, downloadName)
+ if u != nil && err == nil {
+ if archiver.ReleaseID != 0 {
+ err = repo_model.CountArchiveDownload(ctx, ctx.Repo.Repository.ID, archiver.ReleaseID, archiver.Type)
+ if err != nil {
+ ctx.ServerError("CountArchiveDownload", err)
+ return
+ }
+ }
+
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+
+	// Otherwise, stream the archive from the repo archive storage.
+ fr, err := storage.RepoArchives.Open(rPath)
+ if err != nil {
+ ctx.ServerError("Open", err)
+ return
+ }
+ defer fr.Close()
+
+ if archiver.ReleaseID != 0 {
+ err = repo_model.CountArchiveDownload(ctx, ctx.Repo.Repository.ID, archiver.ReleaseID, archiver.Type)
+ if err != nil {
+ ctx.ServerError("CountArchiveDownload", err)
+ return
+ }
+ }
+
+ ctx.ServeContent(fr, &context.ServeHeaderOptions{
+ Filename: downloadName,
+ LastModified: archiver.CreatedUnix.AsLocalTime(),
+ })
+}
+
+// InitiateDownload will enqueue an archival request, as needed. It may submit
+// a request that's already in progress; in that case the archiver service
+// simply ignores the duplicate.
+func InitiateDownload(ctx *context.Context) {
+ uri := ctx.Params("*")
+ aReq, err := archiver_service.NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri)
+ if err != nil {
+ ctx.ServerError("archiver_service.NewRequest", err)
+ return
+ }
+ if aReq == nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ archiver, err := repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID)
+ if err != nil {
+ ctx.ServerError("archiver_service.StartArchive", err)
+ return
+ }
+ if archiver == nil || archiver.Status != repo_model.ArchiverReady {
+ if err := archiver_service.StartArchive(aReq); err != nil {
+ ctx.ServerError("archiver_service.StartArchive", err)
+ return
+ }
+ }
+
+ var completed bool
+ if archiver != nil && archiver.Status == repo_model.ArchiverReady {
+ completed = true
+ }
+
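+	// The frontend polls this endpoint; an (illustrative) response is {"complete": false} until the archive is ready.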
+ ctx.JSON(http.StatusOK, map[string]any{
+ "complete": completed,
+ })
+}
+
+// SearchRepo searches repositories by the given options and renders the result as JSON
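+// An (illustrative) request: GET /repo/search?q=tea&mode=fork&page=2&limit=10
+// replies with a JSON list of matching repositories.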
+func SearchRepo(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+ opts := &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ Actor: ctx.Doer,
+ Keyword: ctx.FormTrim("q"),
+ OwnerID: ctx.FormInt64("uid"),
+ PriorityOwnerID: ctx.FormInt64("priority_owner_id"),
+ TeamID: ctx.FormInt64("team_id"),
+ TopicOnly: ctx.FormBool("topic"),
+ Collaborate: optional.None[bool](),
+ Private: ctx.IsSigned && (ctx.FormString("private") == "" || ctx.FormBool("private")),
+ Template: optional.None[bool](),
+ StarredByID: ctx.FormInt64("starredBy"),
+ IncludeDescription: ctx.FormBool("includeDesc"),
+ }
+
+ if ctx.FormString("template") != "" {
+ opts.Template = optional.Some(ctx.FormBool("template"))
+ }
+
+ if ctx.FormBool("exclusive") {
+ opts.Collaborate = optional.Some(false)
+ }
+
+ mode := ctx.FormString("mode")
+ switch mode {
+ case "source":
+ opts.Fork = optional.Some(false)
+ opts.Mirror = optional.Some(false)
+ case "fork":
+ opts.Fork = optional.Some(true)
+ case "mirror":
+ opts.Mirror = optional.Some(true)
+ case "collaborative":
+ opts.Mirror = optional.Some(false)
+ opts.Collaborate = optional.Some(true)
+ case "":
+ default:
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid search mode: \"%s\"", mode))
+ return
+ }
+
+ if ctx.FormString("archived") != "" {
+ opts.Archived = optional.Some(ctx.FormBool("archived"))
+ }
+
+ if ctx.FormString("is_private") != "" {
+ opts.IsPrivate = optional.Some(ctx.FormBool("is_private"))
+ }
+
+ sortMode := ctx.FormString("sort")
+ if len(sortMode) > 0 {
+ sortOrder := ctx.FormString("order")
+ if len(sortOrder) == 0 {
+ sortOrder = "asc"
+ }
+ if searchModeMap, ok := repo_model.OrderByMap[sortOrder]; ok {
+ if orderBy, ok := searchModeMap[sortMode]; ok {
+ opts.OrderBy = orderBy
+ } else {
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort mode: \"%s\"", sortMode))
+ return
+ }
+ } else {
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort order: \"%s\"", sortOrder))
+ return
+ }
+ }
+
+ // To improve performance when only the count is requested
+ if ctx.FormBool("count_only") {
+ if count, err := repo_model.CountRepository(ctx, opts); err != nil {
+ log.Error("CountRepository: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil) // frontend JS doesn't handle error response (same as below)
+ } else {
+ ctx.SetTotalCountHeader(count)
+ ctx.JSONOK()
+ }
+ return
+ }
+
+ repos, count, err := repo_model.SearchRepository(ctx, opts)
+ if err != nil {
+ log.Error("SearchRepository: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil)
+ return
+ }
+
+ ctx.SetTotalCountHeader(count)
+
+ latestCommitStatuses, err := commitstatus_service.FindReposLastestCommitStatuses(ctx, repos)
+ if err != nil {
+ log.Error("FindReposLastestCommitStatuses: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, latestCommitStatuses)
+ }
+
+ results := make([]*repo_service.WebSearchRepository, len(repos))
+ for i, repo := range repos {
+ results[i] = &repo_service.WebSearchRepository{
+ Repository: &api.Repository{
+ ID: repo.ID,
+ FullName: repo.FullName(),
+ Fork: repo.IsFork,
+ Private: repo.IsPrivate,
+ Template: repo.IsTemplate,
+ Mirror: repo.IsMirror,
+ Stars: repo.NumStars,
+ HTMLURL: repo.HTMLURL(),
+ Link: repo.Link(),
+ Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
+ },
+ }
+
+ if latestCommitStatuses[i] != nil {
+ results[i].LatestCommitStatus = latestCommitStatuses[i]
+ results[i].LocaleLatestCommitStatus = latestCommitStatuses[i].LocaleString(ctx.Locale)
+ }
+ }
+
+ ctx.JSON(http.StatusOK, repo_service.WebSearchResults{
+ OK: true,
+ Data: results,
+ })
+}
+
+type branchTagSearchResponse struct {
+ Results []string `json:"results"`
+}
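+// e.g. an (illustrative) serialized response: {"results": ["main", "develop"]}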
+
+// GetBranchesList gets the branches of the current repo
+func GetBranchesList(ctx *context.Context) {
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsDeletedBranch: optional.Some(false),
+ ListOptions: db.ListOptionsAll,
+ }
+ branches, err := git_model.FindBranchNames(ctx, branchOpts)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+ resp := &branchTagSearchResponse{}
+	// always put the default branch at the top if it exists
+ if slices.Contains(branches, ctx.Repo.Repository.DefaultBranch) {
+ branches = util.SliceRemoveAll(branches, ctx.Repo.Repository.DefaultBranch)
+ branches = append([]string{ctx.Repo.Repository.DefaultBranch}, branches...)
+ }
+ resp.Results = branches
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// GetTagList gets the tag list of the current repo
+func GetTagList(ctx *context.Context) {
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+ resp := &branchTagSearchResponse{}
+ resp.Results = tags
+ ctx.JSON(http.StatusOK, resp)
+}
+
+func PrepareBranchList(ctx *context.Context) {
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsDeletedBranch: optional.Some(false),
+ ListOptions: db.ListOptionsAll,
+ }
+ brs, err := git_model.FindBranchNames(ctx, branchOpts)
+ if err != nil {
+ ctx.ServerError("GetBranches", err)
+ return
+ }
+	// always put the default branch at the top if it exists
+ if slices.Contains(brs, ctx.Repo.Repository.DefaultBranch) {
+ brs = util.SliceRemoveAll(brs, ctx.Repo.Repository.DefaultBranch)
+ brs = append([]string{ctx.Repo.Repository.DefaultBranch}, brs...)
+ }
+ ctx.Data["Branches"] = brs
+}
diff --git a/routers/web/repo/search.go b/routers/web/repo/search.go
new file mode 100644
index 0000000..c4f9f9a
--- /dev/null
+++ b/routers/web/repo/search.go
@@ -0,0 +1,105 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const tplSearch base.TplName = "repo/search"
+
+// Search render repository search page
+func Search(ctx *context.Context) {
+ language := ctx.FormTrim("l")
+ keyword := ctx.FormTrim("q")
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Language"] = language
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["PageIsViewCode"] = true
+
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplSearch)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ var total int
+ var searchResults []*code_indexer.Result
+ var searchResultLanguages []*code_indexer.SearchResultLanguages
+ if setting.Indexer.RepoIndexerEnabled {
+ var err error
+ total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, &code_indexer.SearchOptions{
+ RepoIDs: []int64{ctx.Repo.Repository.ID},
+ Keyword: keyword,
+ IsKeywordFuzzy: isFuzzy,
+ Language: language,
+ Paginator: &db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.RepoSearchPagingNum,
+ },
+ })
+ if err != nil {
+ if code_indexer.IsAvailable(ctx) {
+ ctx.ServerError("SearchResults", err)
+ return
+ }
+ ctx.Data["CodeIndexerUnavailable"] = true
+ } else {
+ ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+ }
+ } else {
+ res, err := git.GrepSearch(ctx, ctx.Repo.GitRepo, keyword, git.GrepOptions{
+ ContextLineNumber: 1,
+ IsFuzzy: isFuzzy,
+ RefName: ctx.Repo.RefName,
+ })
+ if err != nil {
+ ctx.ServerError("GrepSearch", err)
+ return
+ }
+ total = len(res)
+ pageStart := min((page-1)*setting.UI.RepoSearchPagingNum, len(res))
+ pageEnd := min(page*setting.UI.RepoSearchPagingNum, len(res))
+ res = res[pageStart:pageEnd]
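+		// e.g. with RepoSearchPagingNum 10 and page 2, this keeps matches 10..19 (both bounds clamped to the result count)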
+ for _, r := range res {
+ searchResults = append(searchResults, &code_indexer.Result{
+ RepoID: ctx.Repo.Repository.ID,
+ Filename: r.Filename,
+ CommitID: ctx.Repo.CommitID,
+ // UpdatedUnix: not supported yet
+ // Language: not supported yet
+ // Color: not supported yet
+ Lines: code_indexer.HighlightSearchResultCode(r.Filename, r.LineNumbers, r.HighlightedRanges, strings.Join(r.LineCodes, "\n")),
+ })
+ }
+ }
+
+ ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
+ ctx.Data["Repo"] = ctx.Repo.Repository
+ ctx.Data["SourcePath"] = ctx.Repo.Repository.Link()
+ ctx.Data["SearchResults"] = searchResults
+ ctx.Data["SearchResultLanguages"] = searchResultLanguages
+
+ pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "l", "Language")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplSearch)
+}
diff --git a/routers/web/repo/setting/avatar.go b/routers/web/repo/setting/avatar.go
new file mode 100644
index 0000000..504f57c
--- /dev/null
+++ b/routers/web/repo/setting/avatar.go
@@ -0,0 +1,76 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "io"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// UpdateAvatarSetting updates the repo's avatar
+func UpdateAvatarSetting(ctx *context.Context, form forms.AvatarForm) error {
+ ctxRepo := ctx.Repo.Repository
+
+ if form.Avatar == nil {
+		// No avatar was uploaded, and we neither remove the existing one
+		// nor generate a random one here.
+		// Just exit, no action.
+ if ctxRepo.CustomAvatarRelativePath() == "" {
+ log.Trace("No avatar was uploaded for repo: %d. Default icon will appear instead.", ctxRepo.ID)
+ }
+ return nil
+ }
+
+ r, err := form.Avatar.Open()
+ if err != nil {
+ return fmt.Errorf("Avatar.Open: %w", err)
+ }
+ defer r.Close()
+
+ if form.Avatar.Size > setting.Avatar.MaxFileSize {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_is_too_big", form.Avatar.Size/1024, setting.Avatar.MaxFileSize/1024))
+ }
+
+ data, err := io.ReadAll(r)
+ if err != nil {
+ return fmt.Errorf("io.ReadAll: %w", err)
+ }
+ st := typesniffer.DetectContentType(data)
+ if !(st.IsImage() && !st.IsSvgImage()) {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_not_a_image"))
+ }
+ if err = repo_service.UploadAvatar(ctx, ctxRepo, data); err != nil {
+ return fmt.Errorf("UploadAvatar: %w", err)
+ }
+ return nil
+}
+
+// SettingsAvatar saves a newly POSTed repository avatar
+func SettingsAvatar(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AvatarForm)
+ form.Source = forms.AvatarLocal
+ if err := UpdateAvatarSetting(ctx, *form); err != nil {
+ ctx.Flash.Error(err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_avatar_success"))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+}
+
+// SettingsDeleteAvatar deletes the repository avatar
+func SettingsDeleteAvatar(ctx *context.Context) {
+ if err := repo_service.DeleteAvatar(ctx, ctx.Repo.Repository); err != nil {
+ ctx.Flash.Error(fmt.Sprintf("DeleteAvatar: %v", err))
+ }
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings")
+}
diff --git a/routers/web/repo/setting/collaboration.go b/routers/web/repo/setting/collaboration.go
new file mode 100644
index 0000000..75b5515
--- /dev/null
+++ b/routers/web/repo/setting/collaboration.go
@@ -0,0 +1,217 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
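+	// Each submitted value has the form "<oid> <size>", e.g. an (illustrative) "e3b0c44298fc1c14 12345"; it is split at the first space below.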
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/mailer"
+ org_service "code.gitea.io/gitea/services/org"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// Collaboration renders a repository's collaboration page
+func Collaboration(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.collaboration")
+ ctx.Data["PageIsSettingsCollaboration"] = true
+
+ users, err := repo_model.GetCollaborators(ctx, ctx.Repo.Repository.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetCollaborators", err)
+ return
+ }
+ ctx.Data["Collaborators"] = users
+
+ teams, err := organization.GetRepoTeams(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoTeams", err)
+ return
+ }
+ ctx.Data["Teams"] = teams
+ ctx.Data["Repo"] = ctx.Repo.Repository
+ ctx.Data["OrgID"] = ctx.Repo.Repository.OwnerID
+ ctx.Data["OrgName"] = ctx.Repo.Repository.OwnerName
+ ctx.Data["Org"] = ctx.Repo.Repository.Owner
+ ctx.Data["Units"] = unit_model.Units
+
+ ctx.HTML(http.StatusOK, tplCollaboration)
+}
+
+// CollaborationPost handles adding a collaborator to a repository
+func CollaborationPost(ctx *context.Context) {
+ name := strings.ToLower(ctx.FormString("collaborator"))
+ if len(name) == 0 || ctx.Repo.Owner.LowerName == name {
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
+ u, err := user_model.GetUserByName(ctx, name)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("form.user_not_exist"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return
+ }
+
+ if !u.IsActive {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_inactive_user"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
+ // Organization is not allowed to be added as a collaborator.
+ if u.IsOrganization() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.org_not_allowed_to_be_collaborator"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
+ if got, err := repo_model.IsCollaborator(ctx, ctx.Repo.Repository.ID, u.ID); err == nil && got {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_duplicate"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+	// Find the owner team of the organization the repo belongs to and
+	// check whether the user we're trying to add is an owner.
+ if ctx.Repo.Repository.Owner.IsOrganization() {
+ if isOwner, err := organization.IsOrganizationOwner(ctx, ctx.Repo.Repository.Owner.ID, u.ID); err != nil {
+ ctx.ServerError("IsOrganizationOwner", err)
+ return
+ } else if isOwner {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_owner"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+ }
+
+ if err = repo_module.AddCollaborator(ctx, ctx.Repo.Repository, u); err != nil {
+ if !errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.ServerError("AddCollaborator", err)
+ return
+ }
+
+		// To give a good error message, be precise about who has blocked whom.
+ if blockedOurs := user_model.IsBlocked(ctx, ctx.Repo.Repository.OwnerID, u.ID); blockedOurs {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_blocked_our"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_blocked_them"))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if setting.Service.EnableNotifyMail {
+ mailer.SendCollaboratorMail(u, ctx.Doer, ctx.Repo.Repository)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_collaborator_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+}
+
+// ChangeCollaborationAccessMode handles changing a collaborator's access mode
+func ChangeCollaborationAccessMode(ctx *context.Context) {
+ if err := repo_model.ChangeCollaborationAccessMode(
+ ctx,
+ ctx.Repo.Repository,
+ ctx.FormInt64("uid"),
+ perm.AccessMode(ctx.FormInt("mode"))); err != nil {
+ log.Error("ChangeCollaborationAccessMode: %v", err)
+ }
+}
+
+// DeleteCollaboration deletes a collaboration of a repository
+func DeleteCollaboration(ctx *context.Context) {
+ if err := repo_service.DeleteCollaboration(ctx, ctx.Repo.Repository, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteCollaboration: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_collaborator_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
+
+// AddTeamPost handles adding a team to a repository
+func AddTeamPost(ctx *context.Context) {
+ if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ name := strings.ToLower(ctx.FormString("team"))
+ if len(name) == 0 {
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ team, err := organization.OrgFromUser(ctx.Repo.Owner).GetTeam(ctx, name)
+ if err != nil {
+ if organization.IsErrTeamNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("form.team_not_exist"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ } else {
+ ctx.ServerError("GetTeam", err)
+ }
+ return
+ }
+
+ if team.OrgID != ctx.Repo.Repository.OwnerID {
+ ctx.Flash.Error(ctx.Tr("repo.settings.team_not_in_organization"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if organization.HasTeamRepo(ctx, ctx.Repo.Repository.OwnerID, team.ID, ctx.Repo.Repository.ID) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_team_duplicate"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if err = org_service.TeamAddRepository(ctx, team, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("TeamAddRepository", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_team_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
+
+// DeleteTeam handles deleting a team from a repository
+func DeleteTeam(ctx *context.Context) {
+ if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ team, err := organization.GetTeamByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+ ctx.ServerError("GetTeamByID", err)
+ return
+ }
+
+ if err = repo_service.RemoveRepositoryFromTeam(ctx, team, ctx.Repo.Repository.ID); err != nil {
+		ctx.ServerError("RemoveRepositoryFromTeam", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_team_success"))
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
diff --git a/routers/web/repo/setting/default_branch.go b/routers/web/repo/setting/default_branch.go
new file mode 100644
index 0000000..881d148
--- /dev/null
+++ b/routers/web/repo/setting/default_branch.go
@@ -0,0 +1,54 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// SetDefaultBranchPost sets the default branch
+func SetDefaultBranchPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.branches.update_default_branch")
+ ctx.Data["PageIsSettingsBranches"] = true
+
+ repo.PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ repo := ctx.Repo.Repository
+
+ switch ctx.FormString("action") {
+ case "default_branch":
+ if ctx.HasError() {
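+		// Each non-empty line is compiled as a glob pattern; e.g. an (illustrative) pattern "ci/*" matches the status contexts "ci/build" and "ci/test".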
+ ctx.HTML(http.StatusOK, tplBranches)
+ return
+ }
+
+ branch := ctx.FormString("branch")
+ if err := repo_service.SetRepoDefaultBranch(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, branch); err != nil {
+ switch {
+ case git_model.IsErrBranchNotExist(err):
+ ctx.Status(http.StatusNotFound)
+ default:
+ ctx.ServerError("SetDefaultBranch", err)
+ }
+ return
+ }
+
+ log.Trace("Repository basic settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ default:
+ ctx.NotFound("", nil)
+ }
+}
diff --git a/routers/web/repo/setting/deploy_key.go b/routers/web/repo/setting/deploy_key.go
new file mode 100644
index 0000000..abc3eb4
--- /dev/null
+++ b/routers/web/repo/setting/deploy_key.go
@@ -0,0 +1,109 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// DeployKeys renders the deploy keys page of a repository
+func DeployKeys(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys") + " / " + ctx.Tr("secrets.secrets")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+
+ keys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("ListDeployKeys", err)
+ return
+ }
+ ctx.Data["Deploykeys"] = keys
+
+ ctx.HTML(http.StatusOK, tplDeployKeys)
+}
+
+// DeployKeysPost handles adding a deploy key to a repository
+func DeployKeysPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AddKeyForm)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+
+ keys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("ListDeployKeys", err)
+ return
+ }
+ ctx.Data["Deploykeys"] = keys
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplDeployKeys)
+ return
+ }
+
+ content, err := asymkey_model.CheckPublicKeyString(form.Content)
+ if err != nil {
+ if db.IsErrSSHDisabled(err) {
+ ctx.Flash.Info(ctx.Tr("settings.ssh_disabled"))
+ } else if asymkey_model.IsErrKeyUnableVerify(err) {
+ ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key"))
+ } else if err == asymkey_model.ErrKeyIsPrivate {
+ ctx.Data["HasError"] = true
+ ctx.Data["Err_Content"] = true
+ ctx.Flash.Error(ctx.Tr("form.must_use_public_key"))
+ } else {
+ ctx.Data["HasError"] = true
+ ctx.Data["Err_Content"] = true
+ ctx.Flash.Error(ctx.Tr("form.invalid_ssh_key", err.Error()))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
+ return
+ }
+
+ key, err := asymkey_model.AddDeployKey(ctx, ctx.Repo.Repository.ID, form.Title, content, !form.IsWritable)
+ if err != nil {
+ ctx.Data["HasError"] = true
+ switch {
+ case asymkey_model.IsErrDeployKeyAlreadyExist(err):
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_been_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrKeyAlreadyExist(err):
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_key_been_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrKeyNameAlreadyUsed(err):
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_name_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrDeployKeyNameAlreadyUsed(err):
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_name_used"), tplDeployKeys, &form)
+ default:
+ ctx.ServerError("AddDeployKey", err)
+ }
+ return
+ }
+
+ log.Trace("Deploy key added: %d", ctx.Repo.Repository.ID)
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_key_success", key.Name))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
+}
+
+// DeleteDeployKey handles deleting a deploy key
+func DeleteDeployKey(ctx *context.Context) {
+ if err := asymkey_service.DeleteDeployKey(ctx, ctx.Doer, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteDeployKey: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.deploy_key_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/keys")
+}
diff --git a/routers/web/repo/setting/git_hooks.go b/routers/web/repo/setting/git_hooks.go
new file mode 100644
index 0000000..217a01c
--- /dev/null
+++ b/routers/web/repo/setting/git_hooks.go
@@ -0,0 +1,65 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+)
+
+// GitHooks renders the Git hooks page of a repository
+func GitHooks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
+ ctx.Data["PageIsSettingsGitHooks"] = true
+
+ hooks, err := ctx.Repo.GitRepo.Hooks()
+ if err != nil {
+ ctx.ServerError("Hooks", err)
+ return
+ }
+ ctx.Data["Hooks"] = hooks
+
+ ctx.HTML(http.StatusOK, tplGithooks)
+}
+
+// GitHooksEdit renders the page for editing a Git hook of a repository
+func GitHooksEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
+ ctx.Data["PageIsSettingsGitHooks"] = true
+
+ name := ctx.Params(":name")
+ hook, err := ctx.Repo.GitRepo.GetHook(name)
+ if err != nil {
+ if err == git.ErrNotValidHook {
+ ctx.NotFound("GetHook", err)
+ } else {
+ ctx.ServerError("GetHook", err)
+ }
+ return
+ }
+ ctx.Data["Hook"] = hook
+ ctx.HTML(http.StatusOK, tplGithookEdit)
+}
+
+// GitHooksEditPost handles editing a Git hook of a repository
+func GitHooksEditPost(ctx *context.Context) {
+ name := ctx.Params(":name")
+ hook, err := ctx.Repo.GitRepo.GetHook(name)
+ if err != nil {
+ if err == git.ErrNotValidHook {
+ ctx.NotFound("GetHook", err)
+ } else {
+ ctx.ServerError("GetHook", err)
+ }
+ return
+ }
+ hook.Content = ctx.FormString("content")
+ if err = hook.Update(); err != nil {
+ ctx.ServerError("hook.Update", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/hooks/git")
+}
diff --git a/routers/web/repo/setting/lfs.go b/routers/web/repo/setting/lfs.go
new file mode 100644
index 0000000..7e36343
--- /dev/null
+++ b/routers/web/repo/setting/lfs.go
@@ -0,0 +1,562 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "bytes"
+ "fmt"
+ gotemplate "html/template"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "strconv"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/pipeline"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsLFS base.TplName = "repo/settings/lfs"
+ tplSettingsLFSLocks base.TplName = "repo/settings/lfs_locks"
+ tplSettingsLFSFile base.TplName = "repo/settings/lfs_file"
+ tplSettingsLFSFileFind base.TplName = "repo/settings/lfs_file_find"
+ tplSettingsLFSPointers base.TplName = "repo/settings/lfs_pointers"
+)
+
+// LFSFiles shows a repository's LFS files
+func LFSFiles(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFiles", nil)
+ return
+ }
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ total, err := git_model.CountLFSMetaObjects(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("LFSFiles", err)
+ return
+ }
+ ctx.Data["Total"] = total
+
+ pager := context.NewPagination(int(total), setting.UI.ExplorePagingNum, page, 5)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.lfs")
+ ctx.Data["PageIsSettingsLFS"] = true
+ lfsMetaObjects, err := git_model.GetLFSMetaObjects(ctx, ctx.Repo.Repository.ID, pager.Paginater.Current(), setting.UI.ExplorePagingNum)
+ if err != nil {
+ ctx.ServerError("LFSFiles", err)
+ return
+ }
+ ctx.Data["LFSFiles"] = lfsMetaObjects
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFS)
+}
+
+// LFSLocks shows a repository's LFS locks
+func LFSLocks(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSLocks", nil)
+ return
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ total, err := git_model.CountLFSLockByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ ctx.Data["Total"] = total
+
+ pager := context.NewPagination(int(total), setting.UI.ExplorePagingNum, page, 5)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.lfs_locks")
+ ctx.Data["PageIsSettingsLFS"] = true
+ lfsLocks, err := git_model.GetLFSLockByRepoID(ctx, ctx.Repo.Repository.ID, pager.Paginater.Current(), setting.UI.ExplorePagingNum)
+ if err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ if err := lfsLocks.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ ctx.Data["LFSLocks"] = lfsLocks
+
+ if len(lfsLocks) == 0 {
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFSLocks)
+ return
+ }
+
+ // Clone base repo.
+ tmpBasePath, err := repo_module.CreateTemporaryPath("locks")
+ if err != nil {
+ log.Error("Failed to create temporary path: %v", err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ defer func() {
+ if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
+ log.Error("LFSLocks: RemoveTemporaryPath: %v", err)
+ }
+ }()
+
+ if err := git.Clone(ctx, ctx.Repo.Repository.RepoPath(), tmpBasePath, git.CloneRepoOptions{
+ Bare: true,
+ Shared: true,
+ }); err != nil {
+ log.Error("Failed to clone repository: %s (%v)", ctx.Repo.Repository.FullName(), err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("failed to clone repository: %s (%w)", ctx.Repo.Repository.FullName(), err))
+ return
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ log.Error("Unable to open temporary repository: %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("failed to open new temporary repository in: %s %w", tmpBasePath, err))
+ return
+ }
+ defer gitRepo.Close()
+
+ filenames := make([]string, len(lfsLocks))
+
+ for i, lock := range lfsLocks {
+ filenames[i] = lock.Path
+ }
+
+ if err := gitRepo.ReadTreeToIndex(ctx.Repo.Repository.DefaultBranch); err != nil {
+ log.Error("Unable to read the default branch to the index: %s (%v)", ctx.Repo.Repository.DefaultBranch, err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("unable to read the default branch to the index: %s (%w)", ctx.Repo.Repository.DefaultBranch, err))
+ return
+ }
+
+ ctx.Data["Lockables"], err = lockablesGitAttributes(gitRepo, lfsLocks)
+ if err != nil {
+ log.Error("Unable to get lockablesGitAttributes in %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ filelist, err := gitRepo.LsFiles(filenames...)
+ if err != nil {
+ log.Error("Unable to lsfiles in %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ fileset := make(container.Set[string], len(filelist))
+ fileset.AddMultiple(filelist...)
+
+ linkable := make([]bool, len(lfsLocks))
+ for i, lock := range lfsLocks {
+ linkable[i] = fileset.Contains(lock.Path)
+ }
+ ctx.Data["Linkable"] = linkable
+
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFSLocks)
+}
+
+func lockablesGitAttributes(gitRepo *git.Repository, lfsLocks []*git_model.LFSLock) ([]bool, error) {
+ checker, err := gitRepo.GitAttributeChecker("", "lockable")
+ if err != nil {
+ return nil, fmt.Errorf("could not GitAttributeChecker: %w", err)
+ }
+ defer checker.Close()
+
+ lockables := make([]bool, len(lfsLocks))
+ for i, lock := range lfsLocks {
+ attrs, err := checker.CheckPath(lock.Path)
+ if err != nil {
+ return nil, fmt.Errorf("could not CheckPath(%s): %w", lock.Path, err)
+ }
+ lockables[i] = attrs["lockable"].Bool().Value()
+ }
+ return lockables, nil
+}
+
+// LFSLockFile locks a file
+func LFSLockFile(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSLocks", nil)
+ return
+ }
+ originalPath := ctx.FormString("path")
+ lockPath := originalPath
+ if len(lockPath) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_locking_path", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ if lockPath[len(lockPath)-1] == '/' {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_lock_directory", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ lockPath = util.PathJoinRel(lockPath)
+ if len(lockPath) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_locking_path", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+
+ _, err := git_model.CreateLFSLock(ctx, ctx.Repo.Repository, &git_model.LFSLock{
+ Path: lockPath,
+ OwnerID: ctx.Doer.ID,
+ })
+ if err != nil {
+ if git_model.IsErrLFSLockAlreadyExist(err) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_lock_already_exists", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ ctx.ServerError("LFSLockFile", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+}
+
+// LFSUnlock forcibly unlocks an LFS lock
+func LFSUnlock(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSUnlock", nil)
+ return
+ }
+ _, err := git_model.DeleteLFSLockByID(ctx, ctx.ParamsInt64("lid"), ctx.Repo.Repository, ctx.Doer, true)
+ if err != nil {
+ ctx.ServerError("LFSUnlock", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+}
+
+// LFSFileGet serves a single LFS file
+func LFSFileGet(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+ oid := ctx.Params("oid")
+
+ p := lfs.Pointer{Oid: oid}
+ if !p.IsValid() {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+
+ ctx.Data["Title"] = oid
+ ctx.Data["PageIsSettingsLFS"] = true
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, oid)
+ if err != nil {
+ if err == git_model.ErrLFSObjectNotExist {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.ServerError("LFSFileGet", err)
+ return
+ }
+ ctx.Data["LFSFile"] = meta
+ dataRc, err := lfs.ReadMetaObject(meta.Pointer)
+ if err != nil {
+ ctx.ServerError("LFSFileGet", err)
+ return
+ }
+ defer dataRc.Close()
+ buf := make([]byte, 1024)
+ n, err := util.ReadAtMost(dataRc, buf)
+ if err != nil {
+ ctx.ServerError("Data", err)
+ return
+ }
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ ctx.Data["IsTextFile"] = st.IsText()
+ isRepresentableAsText := st.IsRepresentableAsText()
+
+ fileSize := meta.Size
+ ctx.Data["FileSize"] = meta.Size
+ ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s/%s.git/info/lfs/objects/%s/%s", setting.AppURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid), "direct")
+ switch {
+ case isRepresentableAsText:
+ if st.IsSvgImage() {
+ ctx.Data["IsImageFile"] = true
+ }
+
+ if fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+		// Build code view blocks with line numbers on the server side.
+ escapedContent := &bytes.Buffer{}
+ ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, escapedContent, ctx.Locale, charset.FileviewContext)
+
+ var output bytes.Buffer
+ lines := strings.Split(escapedContent.String(), "\n")
+ // Remove blank line at the end of file
+ if len(lines) > 0 && lines[len(lines)-1] == "" {
+ lines = lines[:len(lines)-1]
+ }
+ for index, line := range lines {
+ line = gotemplate.HTMLEscapeString(line)
+ if index != len(lines)-1 {
+ line += "\n"
+ }
+ output.WriteString(fmt.Sprintf(`<li class="L%d" rel="L%d">%s</li>`, index+1, index+1, line))
+ }
+ ctx.Data["FileContent"] = gotemplate.HTML(output.String())
+
+ output.Reset()
+ for i := 0; i < len(lines); i++ {
+ output.WriteString(fmt.Sprintf(`<span id="L%d">%d</span>`, i+1, i+1))
+ }
+ ctx.Data["LineNums"] = gotemplate.HTML(output.String())
+
+ case st.IsPDF():
+ ctx.Data["IsPDFFile"] = true
+ case st.IsVideo():
+ ctx.Data["IsVideoFile"] = true
+ case st.IsAudio():
+ ctx.Data["IsAudioFile"] = true
+ case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()):
+ ctx.Data["IsImageFile"] = true
+ }
+ ctx.HTML(http.StatusOK, tplSettingsLFSFile)
+}
+
+// LFSDelete disassociates the provided OID from the repository and, if the LFS file is no longer associated with any repository, deletes it
+func LFSDelete(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSDelete", nil)
+ return
+ }
+ oid := ctx.Params("oid")
+ p := lfs.Pointer{Oid: oid}
+ if !p.IsValid() {
+ ctx.NotFound("LFSDelete", nil)
+ return
+ }
+
+ count, err := git_model.RemoveLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, oid)
+ if err != nil {
+ ctx.ServerError("LFSDelete", err)
+ return
+ }
+ // FIXME: Warning: the LFS store is not locked - and can't be locked - there could be a race condition here
+ // Please note a similar condition happens in models/repo.go DeleteRepository
+ if count == 0 {
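+		// LFS content is stored sharded by OID prefix, e.g. an (illustrative) oid "abcdef..." lives at "ab/cd/ef...".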
+ oidPath := path.Join(oid[0:2], oid[2:4], oid[4:])
+ err = storage.LFS.Delete(oidPath)
+ if err != nil {
+ ctx.ServerError("LFSDelete", err)
+ return
+ }
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs")
+}
+
+// LFSFileFind guesses a sha for the provided oid (or uses the provided sha) and then finds the commits that contain this sha
+func LFSFileFind(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFind", nil)
+ return
+ }
+ oid := ctx.FormString("oid")
+ size := ctx.FormInt64("size")
+ if len(oid) == 0 || size == 0 {
+ ctx.NotFound("LFSFind", nil)
+ return
+ }
+ sha := ctx.FormString("sha")
+ ctx.Data["Title"] = oid
+ ctx.Data["PageIsSettingsLFS"] = true
+ objectFormat := ctx.Repo.GetObjectFormat()
+ var objectID git.ObjectID
+ if len(sha) == 0 {
+ pointer := lfs.Pointer{Oid: oid, Size: size}
+ objectID = git.ComputeBlobHash(objectFormat, []byte(pointer.StringContent()))
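+		// Without an explicit SHA, the Git blob hash of the canonical pointer text (computed above) identifies the pointer file in the repository.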
+ sha = objectID.String()
+ } else {
+ objectID = git.MustIDFromString(sha)
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+ ctx.Data["Oid"] = oid
+ ctx.Data["Size"] = size
+ ctx.Data["SHA"] = sha
+
+ results, err := pipeline.FindLFSFile(ctx.Repo.GitRepo, objectID)
+ if err != nil && err != io.EOF {
+ log.Error("Failure in FindLFSFile: %v", err)
+ ctx.ServerError("LFSFind: FindLFSFile.", err)
+ return
+ }
+
+ ctx.Data["Results"] = results
+ ctx.HTML(http.StatusOK, tplSettingsLFSFileFind)
+}
+
+// LFSPointerFiles will search the repository for pointer files and report which are missing LFS files in the content store
+func LFSPointerFiles(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.Data["PageIsSettingsLFS"] = true
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+
+ var err error
+ err = func() error {
+ pointerChan := make(chan lfs.PointerBlob)
+ errChan := make(chan error, 1)
+ go lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan, errChan)
+
+ numPointers := 0
+ var numAssociated, numNoExist, numAssociatable int
+
+ type pointerResult struct {
+ SHA string
+ Oid string
+ Size int64
+ InRepo bool
+ Exists bool
+ Accessible bool
+ Associatable bool
+ }
+
+ results := []pointerResult{}
+
+ contentStore := lfs.NewContentStore()
+ repo := ctx.Repo.Repository
+
+ for pointerBlob := range pointerChan {
+ numPointers++
+
+ result := pointerResult{
+ SHA: pointerBlob.Hash,
+ Oid: pointerBlob.Oid,
+ Size: pointerBlob.Size,
+ }
+
+ if _, err := git_model.GetLFSMetaObjectByOid(ctx, repo.ID, pointerBlob.Oid); err != nil {
+ if err != git_model.ErrLFSObjectNotExist {
+ return err
+ }
+ } else {
+ result.InRepo = true
+ }
+
+ result.Exists, err = contentStore.Exists(pointerBlob.Pointer)
+ if err != nil {
+ return err
+ }
+
+ if result.Exists {
+ if !result.InRepo {
+					// Can we fix it? We need to check whether the current user
+					// has access to a repository that has access to this file.
+ result.Associatable, err = git_model.LFSObjectAccessible(ctx, ctx.Doer, pointerBlob.Oid)
+ if err != nil {
+ return err
+ }
+ if !result.Associatable {
+ associated, err := git_model.ExistsLFSObject(ctx, pointerBlob.Oid)
+ if err != nil {
+ return err
+ }
+ result.Associatable = !associated
+ }
+ }
+ }
+
+ result.Accessible = result.InRepo || result.Associatable
+
+ if result.InRepo {
+ numAssociated++
+ }
+ if !result.Exists {
+ numNoExist++
+ }
+ if result.Associatable {
+ numAssociatable++
+ }
+
+ results = append(results, result)
+ }
+
+ err, has := <-errChan
+ if has {
+ return err
+ }
+
+ ctx.Data["Pointers"] = results
+ ctx.Data["NumPointers"] = numPointers
+ ctx.Data["NumAssociated"] = numAssociated
+ ctx.Data["NumAssociatable"] = numAssociatable
+ ctx.Data["NumNoExist"] = numNoExist
+ ctx.Data["NumNotAssociated"] = numPointers - numAssociated
+
+ return nil
+ }()
+ if err != nil {
+ ctx.ServerError("LFSPointerFiles", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsLFSPointers)
+}
+
+// LFSAutoAssociate automatically associates accessible LFS files
+func LFSAutoAssociate(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSAutoAssociate", nil)
+ return
+ }
+ oids := ctx.FormStrings("oid")
+ metas := make([]*git_model.LFSMetaObject, len(oids))
+ for i, oid := range oids {
+ idx := strings.IndexRune(oid, ' ')
+ if idx < 0 || idx+1 > len(oid) {
+ ctx.ServerError("LFSAutoAssociate", fmt.Errorf("illegal oid input: %s", oid))
+ return
+ }
+ var err error
+ metas[i] = &git_model.LFSMetaObject{}
+ metas[i].Size, err = strconv.ParseInt(oid[idx+1:], 10, 64)
+ if err != nil {
+ ctx.ServerError("LFSAutoAssociate", fmt.Errorf("illegal oid input: %s %w", oid, err))
+ return
+ }
+ metas[i].Oid = oid[:idx]
+ // metas[i].RepositoryID = ctx.Repo.Repository.ID
+ }
+ if err := git_model.LFSAutoAssociate(ctx, metas, ctx.Doer, ctx.Repo.Repository.ID); err != nil {
+ ctx.ServerError("LFSAutoAssociate", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs")
+}
diff --git a/routers/web/repo/setting/main_test.go b/routers/web/repo/setting/main_test.go
new file mode 100644
index 0000000..c414b85
--- /dev/null
+++ b/routers/web/repo/setting/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/repo/setting/protected_branch.go b/routers/web/repo/setting/protected_branch.go
new file mode 100644
index 0000000..b2f5798
--- /dev/null
+++ b/routers/web/repo/setting/protected_branch.go
@@ -0,0 +1,347 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ pull_service "code.gitea.io/gitea/services/pull"
+ "code.gitea.io/gitea/services/repository"
+
+ "github.com/gobwas/glob"
+)
+
+const (
+ tplProtectedBranch base.TplName = "repo/settings/protected_branch"
+)
+
+// ProtectedBranchRules render the page to protect the repository
+func ProtectedBranchRules(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.branches")
+ ctx.Data["PageIsSettingsBranches"] = true
+
+ rules, err := git_model.FindRepoProtectedBranchRules(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetProtectedBranches", err)
+ return
+ }
+ ctx.Data["ProtectedBranches"] = rules
+
+ repo.PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplBranches)
+}
+
+// SettingsProtectedBranch renders the protected branch setting page
+func SettingsProtectedBranch(c *context.Context) {
+ ruleName := c.FormString("rule_name")
+ var rule *git_model.ProtectedBranch
+ if ruleName != "" {
+ var err error
+ rule, err = git_model.GetProtectedBranchRuleByName(c, c.Repo.Repository.ID, ruleName)
+ if err != nil {
+ c.ServerError("GetProtectBranchOfRepoByName", err)
+ return
+ }
+ }
+
+ if rule == nil {
+ // No options found, create defaults.
+ rule = &git_model.ProtectedBranch{}
+ }
+
+ c.Data["PageIsSettingsBranches"] = true
+ c.Data["Title"] = c.Locale.TrString("repo.settings.protected_branch") + " - " + rule.RuleName
+
+ users, err := access_model.GetRepoReaders(c, c.Repo.Repository)
+ if err != nil {
+ c.ServerError("Repo.Repository.GetReaders", err)
+ return
+ }
+ c.Data["Users"] = users
+ c.Data["whitelist_users"] = strings.Join(base.Int64sToStrings(rule.WhitelistUserIDs), ",")
+ c.Data["merge_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistUserIDs), ",")
+ c.Data["approvals_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistUserIDs), ",")
+ c.Data["status_check_contexts"] = strings.Join(rule.StatusCheckContexts, "\n")
+ contexts, _ := git_model.FindRepoRecentCommitStatusContexts(c, c.Repo.Repository.ID, 7*24*time.Hour) // Find last week status check contexts
+ c.Data["recent_status_checks"] = contexts
+
+ if c.Repo.Owner.IsOrganization() {
+ teams, err := organization.OrgFromUser(c.Repo.Owner).TeamsWithAccessToRepo(c, c.Repo.Repository.ID, perm.AccessModeRead)
+ if err != nil {
+ c.ServerError("Repo.Owner.TeamsWithAccessToRepo", err)
+ return
+ }
+ c.Data["Teams"] = teams
+ c.Data["whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.WhitelistTeamIDs), ",")
+ c.Data["merge_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistTeamIDs), ",")
+ c.Data["approvals_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistTeamIDs), ",")
+ }
+
+ c.Data["Rule"] = rule
+ c.HTML(http.StatusOK, tplProtectedBranch)
+}
+
+// SettingsProtectedBranchPost updates the protected branch settings
+func SettingsProtectedBranchPost(ctx *context.Context) {
+ f := web.GetForm(ctx).(*forms.ProtectBranchForm)
+ var protectBranch *git_model.ProtectedBranch
+ if f.RuleName == "" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit", ctx.Repo.RepoLink))
+ return
+ }
+
+ var err error
+ if f.RuleID > 0 {
+ // If the RuleID isn't 0, it must be an edit operation. So we get rule by id.
+ protectBranch, err = git_model.GetProtectedBranchRuleByID(ctx, ctx.Repo.Repository.ID, f.RuleID)
+ if err != nil {
+ ctx.ServerError("GetProtectBranchOfRepoByID", err)
+ return
+ }
+ if protectBranch != nil && protectBranch.RuleName != f.RuleName {
+ // RuleName changed. We need to check if there is a rule with the same name.
+ // If a rule with the same name exists, an error should be returned.
+ sameNameProtectBranch, err := git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, f.RuleName)
+ if err != nil {
+ ctx.ServerError("GetProtectBranchOfRepoByName", err)
+ return
+ }
+ if sameNameProtectBranch != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_duplicate_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+ return
+ }
+ }
+ } else {
+		// Check if a rule with this rule name already exists; if so, redirect to it.
+ protectBranch, err = git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, f.RuleName)
+ if err != nil {
+ ctx.ServerError("GetProtectedBranchRuleByName", err)
+ return
+ }
+ if protectBranch != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_duplicate_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+ return
+ }
+ }
+ if protectBranch == nil {
+ // No options found, create defaults.
+ protectBranch = &git_model.ProtectedBranch{
+ RepoID: ctx.Repo.Repository.ID,
+ RuleName: f.RuleName,
+ }
+ }
+
+ var whitelistUsers, whitelistTeams, mergeWhitelistUsers, mergeWhitelistTeams, approvalsWhitelistUsers, approvalsWhitelistTeams []int64
+ protectBranch.RuleName = f.RuleName
+ if f.RequiredApprovals < 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_approvals_min"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, f.RuleName))
+ return
+ }
+
+ switch f.EnablePush {
+ case "all":
+ protectBranch.CanPush = true
+ protectBranch.EnableWhitelist = false
+ protectBranch.WhitelistDeployKeys = false
+ case "whitelist":
+ protectBranch.CanPush = true
+ protectBranch.EnableWhitelist = true
+ protectBranch.WhitelistDeployKeys = f.WhitelistDeployKeys
+ if strings.TrimSpace(f.WhitelistUsers) != "" {
+ whitelistUsers, _ = base.StringsToInt64s(strings.Split(f.WhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.WhitelistTeams) != "" {
+ whitelistTeams, _ = base.StringsToInt64s(strings.Split(f.WhitelistTeams, ","))
+ }
+ default:
+ protectBranch.CanPush = false
+ protectBranch.EnableWhitelist = false
+ protectBranch.WhitelistDeployKeys = false
+ }
+
+ protectBranch.EnableMergeWhitelist = f.EnableMergeWhitelist
+ if f.EnableMergeWhitelist {
+ if strings.TrimSpace(f.MergeWhitelistUsers) != "" {
+ mergeWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.MergeWhitelistTeams) != "" {
+ mergeWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistTeams, ","))
+ }
+ }
+
+ protectBranch.EnableStatusCheck = f.EnableStatusCheck
+ if f.EnableStatusCheck {
+ patterns := strings.Split(strings.ReplaceAll(f.StatusCheckContexts, "\r", "\n"), "\n")
+ validPatterns := make([]string, 0, len(patterns))
+ for _, pattern := range patterns {
+ trimmed := strings.TrimSpace(pattern)
+ if trimmed == "" {
+ continue
+ }
+ if _, err := glob.Compile(trimmed); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protect_invalid_status_check_pattern", pattern))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, url.QueryEscape(protectBranch.RuleName)))
+ return
+ }
+ validPatterns = append(validPatterns, trimmed)
+ }
+ if len(validPatterns) == 0 {
+ // if status check is enabled, patterns slice is not allowed to be empty
+ ctx.Flash.Error(ctx.Tr("repo.settings.protect_no_valid_status_check_patterns"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, url.QueryEscape(protectBranch.RuleName)))
+ return
+ }
+ protectBranch.StatusCheckContexts = validPatterns
+ } else {
+ protectBranch.StatusCheckContexts = nil
+ }
+
+ protectBranch.RequiredApprovals = f.RequiredApprovals
+ protectBranch.EnableApprovalsWhitelist = f.EnableApprovalsWhitelist
+ if f.EnableApprovalsWhitelist {
+ if strings.TrimSpace(f.ApprovalsWhitelistUsers) != "" {
+ approvalsWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.ApprovalsWhitelistTeams) != "" {
+ approvalsWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistTeams, ","))
+ }
+ }
+ protectBranch.BlockOnRejectedReviews = f.BlockOnRejectedReviews
+ protectBranch.BlockOnOfficialReviewRequests = f.BlockOnOfficialReviewRequests
+ protectBranch.DismissStaleApprovals = f.DismissStaleApprovals
+ protectBranch.IgnoreStaleApprovals = f.IgnoreStaleApprovals
+ protectBranch.RequireSignedCommits = f.RequireSignedCommits
+ protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
+ protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
+ protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
+ protectBranch.ApplyToAdmins = f.ApplyToAdmins
+
+ err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
+ UserIDs: whitelistUsers,
+ TeamIDs: whitelistTeams,
+ MergeUserIDs: mergeWhitelistUsers,
+ MergeTeamIDs: mergeWhitelistTeams,
+ ApprovalsUserIDs: approvalsWhitelistUsers,
+ ApprovalsTeamIDs: approvalsWhitelistTeams,
+ })
+ if err != nil {
+ ctx.ServerError("UpdateProtectBranch", err)
+ return
+ }
+
+	// FIXME: since we only need to recheck the protected file rules, this could be improved
+ matchedBranches, err := git_model.FindAllMatchedBranches(ctx, ctx.Repo.Repository.ID, protectBranch.RuleName)
+ if err != nil {
+ ctx.ServerError("FindAllMatchedBranches", err)
+ return
+ }
+ for _, branchName := range matchedBranches {
+ if err = pull_service.CheckPRsForBaseBranch(ctx, ctx.Repo.Repository, branchName); err != nil {
+ ctx.ServerError("CheckPRsForBaseBranch", err)
+ return
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_protect_branch_success", protectBranch.RuleName))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+}
+
+// DeleteProtectedBranchRulePost deletes a protected branch rule by its ID
+func DeleteProtectedBranchRulePost(ctx *context.Context) {
+ ruleID := ctx.ParamsInt64("id")
+ if ruleID <= 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ rule, err := git_model.GetProtectedBranchRuleByID(ctx, ctx.Repo.Repository.ID, ruleID)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if rule == nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if err := git_model.DeleteProtectedBranch(ctx, ctx.Repo.Repository, ruleID); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", rule.RuleName))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_protected_branch_success", rule.RuleName))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+}
+
+// RenameBranchPost responds to a request to rename a branch
+func RenameBranchPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RenameBranchForm)
+
+ if !ctx.Repo.CanCreateBranch() {
+ ctx.NotFound("RenameBranch", nil)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.GetErrMsg())
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ msg, err := repository.RenameBranch(ctx, ctx.Repo.Repository, ctx.Doer, ctx.Repo.GitRepo, form.From, form.To)
+ if err != nil {
+ if errors.Is(err, git_model.ErrBranchIsProtected) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_protected", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_already_exists", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ } else {
+ ctx.ServerError("RenameBranch", err)
+ }
+ return
+ }
+
+ if msg == "target_exist" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_exist", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if msg == "from_not_exist" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_not_exist", form.From))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.rename_branch_success", form.From, form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+}
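
As a minimal, standalone sketch of the status-check handling in SettingProtectedBranchPost above: the textarea value is normalized into one glob pattern per line and an empty result is rejected. This assumes the github.com/gobwas/glob package (which the handler's glob.Compile call appears to come from); parseStatusCheckPatterns and the sample input are illustrative only.

    package main

    import (
    	"fmt"
    	"strings"

    	"github.com/gobwas/glob"
    )

    // parseStatusCheckPatterns splits a textarea value into lines, trims them and
    // rejects any line that does not compile as a glob pattern.
    func parseStatusCheckPatterns(input string) ([]string, error) {
    	lines := strings.Split(strings.ReplaceAll(input, "\r", "\n"), "\n")
    	patterns := make([]string, 0, len(lines))
    	for _, line := range lines {
    		trimmed := strings.TrimSpace(line)
    		if trimmed == "" {
    			continue
    		}
    		if _, err := glob.Compile(trimmed); err != nil {
    			return nil, fmt.Errorf("invalid status check pattern %q: %w", trimmed, err)
    		}
    		patterns = append(patterns, trimmed)
    	}
    	if len(patterns) == 0 {
    		return nil, fmt.Errorf("at least one status check pattern is required")
    	}
    	return patterns, nil
    }

    func main() {
    	patterns, err := parseStatusCheckPatterns("build/*\r\ntest\n\n")
    	fmt.Println(patterns, err) // [build/* test] <nil>
    }
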
diff --git a/routers/web/repo/setting/protected_tag.go b/routers/web/repo/setting/protected_tag.go
new file mode 100644
index 0000000..2c25b65
--- /dev/null
+++ b/routers/web/repo/setting/protected_tag.go
@@ -0,0 +1,188 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplTags base.TplName = "repo/settings/tags"
+)
+
+// ProtectedTags renders the page to protect tags
+func ProtectedTags(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplTags)
+}
+
+// NewProtectedTagPost handles creation of a protected tag
+func NewProtectedTagPost(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTags)
+ return
+ }
+
+ repo := ctx.Repo.Repository
+ form := web.GetForm(ctx).(*forms.ProtectTagForm)
+
+ pt := &git_model.ProtectedTag{
+ RepoID: repo.ID,
+ NamePattern: strings.TrimSpace(form.NamePattern),
+ }
+
+ if strings.TrimSpace(form.AllowlistUsers) != "" {
+ pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ","))
+ }
+ if strings.TrimSpace(form.AllowlistTeams) != "" {
+ pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ","))
+ }
+
+ if err := git_model.InsertProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("InsertProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+}
+
+// EditProtectedTag renders the page to edit a protected tag
+func EditProtectedTag(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.Data["PageIsEditProtectedTag"] = true
+
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ ctx.Data["name_pattern"] = pt.NamePattern
+ ctx.Data["allowlist_users"] = strings.Join(base.Int64sToStrings(pt.AllowlistUserIDs), ",")
+ ctx.Data["allowlist_teams"] = strings.Join(base.Int64sToStrings(pt.AllowlistTeamIDs), ",")
+
+ ctx.HTML(http.StatusOK, tplTags)
+}
+
+// EditProtectedTagPost handles modification of a protected tag
+func EditProtectedTagPost(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.Data["PageIsEditProtectedTag"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTags)
+ return
+ }
+
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.ProtectTagForm)
+
+ pt.NamePattern = strings.TrimSpace(form.NamePattern)
+ pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ","))
+ pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ","))
+
+ if err := git_model.UpdateProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("UpdateProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags")
+}
+
+// DeleteProtectedTagPost handles deletion of a protected tag
+func DeleteProtectedTagPost(ctx *context.Context) {
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ if err := git_model.DeleteProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("DeleteProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags")
+}
+
+func setTagsContext(ctx *context.Context) error {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.tags")
+ ctx.Data["PageIsSettingsTags"] = true
+
+ protectedTags, err := git_model.GetProtectedTags(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetProtectedTags", err)
+ return err
+ }
+ ctx.Data["ProtectedTags"] = protectedTags
+
+ users, err := access_model.GetRepoReaders(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("Repo.Repository.GetReaders", err)
+ return err
+ }
+ ctx.Data["Users"] = users
+
+ if ctx.Repo.Owner.IsOrganization() {
+ teams, err := organization.OrgFromUser(ctx.Repo.Owner).TeamsWithAccessToRepo(ctx, ctx.Repo.Repository.ID, perm.AccessModeRead)
+ if err != nil {
+ ctx.ServerError("Repo.Owner.TeamsWithAccessToRepo", err)
+ return err
+ }
+ ctx.Data["Teams"] = teams
+ }
+
+ return nil
+}
+
+func selectProtectedTagByContext(ctx *context.Context) *git_model.ProtectedTag {
+ id := ctx.FormInt64("id")
+ if id == 0 {
+ id = ctx.ParamsInt64(":id")
+ }
+
+ tag, err := git_model.GetProtectedTagByID(ctx, id)
+ if err != nil {
+ ctx.ServerError("GetProtectedTagByID", err)
+ return nil
+ }
+
+ if tag != nil && tag.RepoID == ctx.Repo.Repository.ID {
+ return tag
+ }
+
+ ctx.NotFound("", fmt.Errorf("ProtectedTag[%v] not associated to repository %v", id, ctx.Repo.Repository))
+
+ return nil
+}
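
A short, self-contained sketch of the comma-separated ID parsing that NewProtectedTagPost and EditProtectedTagPost delegate to base.StringsToInt64s. The stringsToInt64s helper below is a hypothetical stand-in written for illustration, not the Forgejo function itself (which, as used above, has its error discarded).

    package main

    import (
    	"fmt"
    	"strconv"
    	"strings"
    )

    // stringsToInt64s parses a list of decimal strings into int64 IDs, skipping empty entries.
    func stringsToInt64s(values []string) ([]int64, error) {
    	ids := make([]int64, 0, len(values))
    	for _, v := range values {
    		v = strings.TrimSpace(v)
    		if v == "" {
    			continue
    		}
    		id, err := strconv.ParseInt(v, 10, 64)
    		if err != nil {
    			return nil, err
    		}
    		ids = append(ids, id)
    	}
    	return ids, nil
    }

    func main() {
    	ids, err := stringsToInt64s(strings.Split("1, 2,3", ","))
    	fmt.Println(ids, err) // [1 2 3] <nil>
    }
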
diff --git a/routers/web/repo/setting/runners.go b/routers/web/repo/setting/runners.go
new file mode 100644
index 0000000..a47d3b4
--- /dev/null
+++ b/routers/web/repo/setting/runners.go
@@ -0,0 +1,187 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+ "net/url"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ actions_shared "code.gitea.io/gitea/routers/web/shared/actions"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // TODO: Separate secrets from runners when layout is ready
+ tplRepoRunners base.TplName = "repo/settings/actions"
+ tplOrgRunners base.TplName = "org/settings/actions"
+ tplAdminRunners base.TplName = "admin/actions"
+ tplUserRunners base.TplName = "user/settings/actions"
+ tplRepoRunnerEdit base.TplName = "repo/settings/runner_edit"
+ tplOrgRunnerEdit base.TplName = "org/settings/runners_edit"
+ tplAdminRunnerEdit base.TplName = "admin/runners/edit"
+ tplUserRunnerEdit base.TplName = "user/settings/runner_edit"
+)
+
+type runnersCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsAdmin bool
+ IsUser bool
+ RunnersTemplate base.TplName
+ RunnerEditTemplate base.TplName
+ RedirectLink string
+}
+
+func getRunnersCtx(ctx *context.Context) (*runnersCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &runnersCtx{
+ RepoID: ctx.Repo.Repository.ID,
+ OwnerID: 0,
+ IsRepo: true,
+ RunnersTemplate: tplRepoRunners,
+ RunnerEditTemplate: tplRepoRunnerEdit,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &runnersCtx{
+ RepoID: 0,
+ OwnerID: ctx.Org.Organization.ID,
+ IsOrg: true,
+ RunnersTemplate: tplOrgRunners,
+ RunnerEditTemplate: tplOrgRunnerEdit,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &runnersCtx{
+ RepoID: 0,
+ OwnerID: 0,
+ IsAdmin: true,
+ RunnersTemplate: tplAdminRunners,
+ RunnerEditTemplate: tplAdminRunnerEdit,
+ RedirectLink: setting.AppSubURL + "/admin/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &runnersCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ RunnersTemplate: tplUserRunners,
+ RunnerEditTemplate: tplUserRunnerEdit,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/runners/",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Runners context")
+}
+
+// Runners renders the settings/actions/runners page for the repository, organization, admin and user levels
+func Runners(ctx *context.Context) {
+ ctx.Data["PageIsSharedSettingsRunners"] = true
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageType"] = "runners"
+
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ opts := actions_model.FindRunnerOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: 100,
+ },
+ Sort: ctx.Req.URL.Query().Get("sort"),
+ Filter: ctx.Req.URL.Query().Get("q"),
+ }
+ if rCtx.IsRepo {
+ opts.RepoID = rCtx.RepoID
+ opts.WithAvailable = true
+ } else if rCtx.IsOrg || rCtx.IsUser {
+ opts.OwnerID = rCtx.OwnerID
+ opts.WithAvailable = true
+ }
+ actions_shared.RunnersList(ctx, opts)
+
+ ctx.HTML(http.StatusOK, rCtx.RunnersTemplate)
+}
+
+// RunnersEdit renders the runner edit page for the repository, organization, admin and user levels
+func RunnersEdit(ctx *context.Context) {
+ ctx.Data["PageIsSharedSettingsRunners"] = true
+ ctx.Data["Title"] = ctx.Tr("actions.runners.edit_runner")
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ actions_shared.RunnerDetails(ctx, page,
+ ctx.ParamsInt64(":runnerid"), rCtx.OwnerID, rCtx.RepoID,
+ )
+ ctx.HTML(http.StatusOK, rCtx.RunnerEditTemplate)
+}
+
+func RunnersEditPost(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerDetailsEditPost(ctx, ctx.ParamsInt64(":runnerid"),
+ rCtx.OwnerID, rCtx.RepoID,
+ rCtx.RedirectLink+url.PathEscape(ctx.Params(":runnerid")))
+}
+
+func ResetRunnerRegistrationToken(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerResetRegistrationToken(ctx, rCtx.OwnerID, rCtx.RepoID, rCtx.RedirectLink)
+}
+
+// RunnerDeletePost responds to a request to delete a runner
+func RunnerDeletePost(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerDeletePost(ctx, ctx.ParamsInt64(":runnerid"), rCtx.RedirectLink, rCtx.RedirectLink+url.PathEscape(ctx.Params(":runnerid")))
+}
+
+func RedirectToDefaultSetting(ctx *context.Context) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/actions/runners")
+}
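
The same handlers serve repository, organization, admin and user settings pages, so getRunnersCtx resolves the owner/repository scope, templates and redirect link up front. Below is a simplified, self-contained sketch of that dispatch pattern; the runnerScope struct and boolean flags are illustrative, not the real context API.

    package main

    import (
    	"errors"
    	"fmt"
    )

    type runnerScope struct {
    	OwnerID, RepoID int64
    	Template        string
    	RedirectLink    string
    }

    // resolveRunnerScope picks exactly one scope based on which settings page is active.
    func resolveRunnerScope(isRepo, isOrg, isAdmin, isUser bool, ownerID, repoID int64) (*runnerScope, error) {
    	switch {
    	case isRepo:
    		return &runnerScope{RepoID: repoID, Template: "repo/settings/actions", RedirectLink: "/settings/actions/runners/"}, nil
    	case isOrg:
    		return &runnerScope{OwnerID: ownerID, Template: "org/settings/actions", RedirectLink: "/settings/actions/runners/"}, nil
    	case isAdmin:
    		return &runnerScope{Template: "admin/actions", RedirectLink: "/admin/actions/runners/"}, nil
    	case isUser:
    		return &runnerScope{OwnerID: ownerID, Template: "user/settings/actions", RedirectLink: "/user/settings/actions/runners/"}, nil
    	default:
    		return nil, errors.New("unable to resolve runners scope")
    	}
    }

    func main() {
    	scope, err := resolveRunnerScope(true, false, false, false, 0, 42)
    	fmt.Println(scope, err) // &{0 42 repo/settings/actions /settings/actions/runners/} <nil>
    }
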
diff --git a/routers/web/repo/setting/secrets.go b/routers/web/repo/setting/secrets.go
new file mode 100644
index 0000000..d4d56bf
--- /dev/null
+++ b/routers/web/repo/setting/secrets.go
@@ -0,0 +1,127 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared "code.gitea.io/gitea/routers/web/shared/secrets"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // TODO: Separate secrets from runners when layout is ready
+ tplRepoSecrets base.TplName = "repo/settings/actions"
+ tplOrgSecrets base.TplName = "org/settings/actions"
+ tplUserSecrets base.TplName = "user/settings/actions"
+)
+
+type secretsCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsUser bool
+ SecretsTemplate base.TplName
+ RedirectLink string
+}
+
+func getSecretsCtx(ctx *context.Context) (*secretsCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &secretsCtx{
+ OwnerID: 0,
+ RepoID: ctx.Repo.Repository.ID,
+ IsRepo: true,
+ SecretsTemplate: tplRepoSecrets,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/secrets",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &secretsCtx{
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: 0,
+ IsOrg: true,
+ SecretsTemplate: tplOrgSecrets,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/secrets",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &secretsCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ SecretsTemplate: tplUserSecrets,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/secrets",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Secrets context")
+}
+
+func Secrets(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageType"] = "secrets"
+ ctx.Data["PageIsSharedSettingsSecrets"] = true
+
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+
+ if sCtx.IsRepo {
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+ }
+
+ shared.SetSecretsContext(ctx, sCtx.OwnerID, sCtx.RepoID)
+ if ctx.Written() {
+ return
+ }
+ ctx.HTML(http.StatusOK, sCtx.SecretsTemplate)
+}
+
+func SecretsPost(ctx *context.Context) {
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.PerformSecretsPost(
+ ctx,
+ sCtx.OwnerID,
+ sCtx.RepoID,
+ sCtx.RedirectLink,
+ )
+}
+
+func SecretsDelete(ctx *context.Context) {
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+ shared.PerformSecretsDelete(
+ ctx,
+ sCtx.OwnerID,
+ sCtx.RepoID,
+ sCtx.RedirectLink,
+ )
+}
diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go
new file mode 100644
index 0000000..aee2e2f
--- /dev/null
+++ b/routers/web/repo/setting/setting.go
@@ -0,0 +1,1115 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/indexer/stats"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/validation"
+ "code.gitea.io/gitea/modules/web"
+ actions_service "code.gitea.io/gitea/services/actions"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/federation"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/migrations"
+ mirror_service "code.gitea.io/gitea/services/mirror"
+ repo_service "code.gitea.io/gitea/services/repository"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+)
+
+const (
+ tplSettingsOptions base.TplName = "repo/settings/options"
+ tplSettingsUnits base.TplName = "repo/settings/units"
+ tplCollaboration base.TplName = "repo/settings/collaboration"
+ tplBranches base.TplName = "repo/settings/branches"
+ tplGithooks base.TplName = "repo/settings/githooks"
+ tplGithookEdit base.TplName = "repo/settings/githook_edit"
+ tplDeployKeys base.TplName = "repo/settings/deploy_keys"
+)
+
+// SettingsCtxData is a middleware that sets all the general context data for the
+// settings template.
+func SettingsCtxData(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.options")
+ ctx.Data["PageIsSettingsOptions"] = true
+ ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+ ctx.Data["DisableNewPushMirrors"] = setting.Mirror.DisableNewPush
+ ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
+ ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
+
+ signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath())
+ ctx.Data["SigningKeyAvailable"] = len(signing) > 0
+ ctx.Data["SigningSettings"] = setting.Repository.Signing
+ ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ if ctx.Doer.IsAdmin {
+ if setting.Indexer.RepoIndexerEnabled {
+ status, err := repo_model.GetIndexerStatus(ctx, ctx.Repo.Repository, repo_model.RepoIndexerTypeCode)
+ if err != nil {
+ ctx.ServerError("repo.indexer_status", err)
+ return
+ }
+ ctx.Data["CodeIndexerStatus"] = status
+ }
+ status, err := repo_model.GetIndexerStatus(ctx, ctx.Repo.Repository, repo_model.RepoIndexerTypeStats)
+ if err != nil {
+ ctx.ServerError("repo.indexer_status", err)
+ return
+ }
+ ctx.Data["StatsIndexerStatus"] = status
+ }
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, ctx.Repo.Repository.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetPushMirrorsByRepoID", err)
+ return
+ }
+ ctx.Data["PushMirrors"] = pushMirrors
+ ctx.Data["CanUseSSHMirroring"] = git.HasSSHExecutable
+}
+
+// Units shows a repository's unit settings page
+func Units(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.units.units")
+ ctx.Data["PageIsRepoSettingsUnits"] = true
+
+ ctx.HTML(http.StatusOK, tplSettingsUnits)
+}
+
+func UnitsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RepoUnitSettingForm)
+
+ repo := ctx.Repo.Repository
+
+ var repoChanged bool
+ var units []repo_model.RepoUnit
+ var deleteUnitTypes []unit_model.Type
+
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ if repo.CloseIssuesViaCommitInAnyBranch != form.EnableCloseIssuesViaCommitInAnyBranch {
+ repo.CloseIssuesViaCommitInAnyBranch = form.EnableCloseIssuesViaCommitInAnyBranch
+ repoChanged = true
+ }
+
+ if form.EnableCode && !unit_model.TypeCode.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeCode,
+ })
+ } else if !unit_model.TypeCode.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeCode)
+ }
+
+ if form.EnableWiki && form.EnableExternalWiki && !unit_model.TypeExternalWiki.UnitGlobalDisabled() {
+ if !validation.IsValidExternalURL(form.ExternalWikiURL) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.external_wiki_url_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeExternalWiki,
+ Config: &repo_model.ExternalWikiConfig{
+ ExternalWikiURL: form.ExternalWikiURL,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeWiki)
+ } else if form.EnableWiki && !form.EnableExternalWiki && !unit_model.TypeWiki.UnitGlobalDisabled() {
+ var wikiPermissions repo_model.UnitAccessMode
+ if form.GloballyWriteableWiki {
+ wikiPermissions = repo_model.UnitAccessModeWrite
+ } else {
+ wikiPermissions = repo_model.UnitAccessModeRead
+ }
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeWiki,
+ Config: new(repo_model.UnitConfig),
+ DefaultPermissions: wikiPermissions,
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalWiki)
+ } else {
+ if !unit_model.TypeExternalWiki.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalWiki)
+ }
+ if !unit_model.TypeWiki.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeWiki)
+ }
+ }
+
+ if form.EnableIssues && form.EnableExternalTracker && !unit_model.TypeExternalTracker.UnitGlobalDisabled() {
+ if !validation.IsValidExternalURL(form.ExternalTrackerURL) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.external_tracker_url_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+ if len(form.TrackerURLFormat) != 0 && !validation.IsValidExternalTrackerURLFormat(form.TrackerURLFormat) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.tracker_url_format_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeExternalTracker,
+ Config: &repo_model.ExternalTrackerConfig{
+ ExternalTrackerURL: form.ExternalTrackerURL,
+ ExternalTrackerFormat: form.TrackerURLFormat,
+ ExternalTrackerStyle: form.TrackerIssueStyle,
+ ExternalTrackerRegexpPattern: form.ExternalTrackerRegexpPattern,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeIssues)
+ } else if form.EnableIssues && !form.EnableExternalTracker && !unit_model.TypeIssues.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeIssues,
+ Config: &repo_model.IssuesConfig{
+ EnableTimetracker: form.EnableTimetracker,
+ AllowOnlyContributorsToTrackTime: form.AllowOnlyContributorsToTrackTime,
+ EnableDependencies: form.EnableIssueDependencies,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalTracker)
+ } else {
+ if !unit_model.TypeExternalTracker.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalTracker)
+ }
+ if !unit_model.TypeIssues.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeIssues)
+ }
+ }
+
+ if form.EnableProjects && !unit_model.TypeProjects.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeProjects,
+ })
+ } else if !unit_model.TypeProjects.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeProjects)
+ }
+
+ if form.EnableReleases && !unit_model.TypeReleases.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeReleases,
+ })
+ } else if !unit_model.TypeReleases.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeReleases)
+ }
+
+ if form.EnablePackages && !unit_model.TypePackages.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypePackages,
+ })
+ } else if !unit_model.TypePackages.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePackages)
+ }
+
+ if form.EnableActions && !unit_model.TypeActions.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeActions,
+ })
+ } else if !unit_model.TypeActions.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeActions)
+ }
+
+ if form.EnablePulls && !unit_model.TypePullRequests.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypePullRequests,
+ Config: &repo_model.PullRequestsConfig{
+ IgnoreWhitespaceConflicts: form.PullsIgnoreWhitespace,
+ AllowMerge: form.PullsAllowMerge,
+ AllowRebase: form.PullsAllowRebase,
+ AllowRebaseMerge: form.PullsAllowRebaseMerge,
+ AllowSquash: form.PullsAllowSquash,
+ AllowFastForwardOnly: form.PullsAllowFastForwardOnly,
+ AllowManualMerge: form.PullsAllowManualMerge,
+ AutodetectManualMerge: form.EnableAutodetectManualMerge,
+ AllowRebaseUpdate: form.PullsAllowRebaseUpdate,
+ DefaultDeleteBranchAfterMerge: form.DefaultDeleteBranchAfterMerge,
+ DefaultMergeStyle: repo_model.MergeStyle(form.PullsDefaultMergeStyle),
+ DefaultAllowMaintainerEdit: form.DefaultAllowMaintainerEdit,
+ },
+ })
+ } else if !unit_model.TypePullRequests.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePullRequests)
+ }
+
+ if len(units) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.update_settings_no_unit"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/units")
+ return
+ }
+
+ if err := repo_service.UpdateRepositoryUnits(ctx, repo, units, deleteUnitTypes); err != nil {
+ ctx.ServerError("UpdateRepositoryUnits", err)
+ return
+ }
+ if repoChanged {
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ }
+ log.Trace("Repository advanced settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/units")
+}
+
+// Settings shows a repository's settings page
+func Settings(ctx *context.Context) {
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+}
+
+// SettingsPost responds to changes of a repository's settings
+func SettingsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RepoSettingForm)
+
+ ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+ ctx.Data["DisableNewPushMirrors"] = setting.Mirror.DisableNewPush
+ ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
+ ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
+
+ signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath())
+ ctx.Data["SigningKeyAvailable"] = len(signing) > 0
+ ctx.Data["SigningSettings"] = setting.Repository.Signing
+ ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ repo := ctx.Repo.Repository
+
+ switch ctx.FormString("action") {
+ case "update":
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+ return
+ }
+
+ newRepoName := form.RepoName
+ // Check if repository name has been changed.
+ if repo.LowerName != strings.ToLower(newRepoName) {
+ // Close the GitRepo if open
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+ if err := repo_service.ChangeRepositoryName(ctx, ctx.Doer, repo, newRepoName); err != nil {
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tplSettingsOptions, &form)
+ case db.IsErrNameReserved(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplSettingsOptions, &form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tplSettingsOptions, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tplSettingsOptions, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tplSettingsOptions, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tplSettingsOptions, form)
+ }
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplSettingsOptions, &form)
+ default:
+ ctx.ServerError("ChangeRepositoryName", err)
+ }
+ return
+ }
+
+ log.Trace("Repository name changed: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newRepoName)
+ }
+ // In case it's just a case change.
+ repo.Name = newRepoName
+ repo.LowerName = strings.ToLower(newRepoName)
+ repo.Description = form.Description
+ repo.Website = form.Website
+ repo.IsTemplate = form.Template
+
+		// The visibility of a forked repository is kept in sync with its base repository.
+ if repo.IsFork {
+ form.Private = repo.BaseRepo.IsPrivate || repo.BaseRepo.Owner.Visibility == structs.VisibleTypePrivate
+ }
+
+ visibilityChanged := repo.IsPrivate != form.Private
+		// when ForcePrivate is enabled, a public repository can still be made private, but only admin users can change a private repository back to public
+ if visibilityChanged && setting.Repository.ForcePrivate && !form.Private && !ctx.Doer.IsAdmin {
+ ctx.RenderWithErr(ctx.Tr("form.repository_force_private"), tplSettingsOptions, form)
+ return
+ }
+
+ repo.IsPrivate = form.Private
+ if err := repo_service.UpdateRepository(ctx, repo, visibilityChanged); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ log.Trace("Repository basic settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "federation":
+ if !setting.Federation.Enabled {
+ ctx.NotFound("", nil)
+ ctx.Flash.Info(ctx.Tr("repo.settings.federation_not_enabled"))
+ return
+ }
+ followingRepos := strings.TrimSpace(form.FollowingRepos)
+ followingRepos = strings.TrimSuffix(followingRepos, ";")
+
+ maxFollowingRepoStrLength := 2048
+ errs := validation.ValidateMaxLen(followingRepos, maxFollowingRepoStrLength, "federationRepos")
+ if len(errs) > 0 {
+ ctx.Data["ERR_FollowingRepos"] = true
+ ctx.Flash.Error(ctx.Tr("repo.form.string_too_long", maxFollowingRepoStrLength))
+ ctx.Redirect(repo.Link() + "/settings")
+ return
+ }
+
+ federationRepoSplit := []string{}
+ if followingRepos != "" {
+ federationRepoSplit = strings.Split(followingRepos, ";")
+ }
+ for idx, repo := range federationRepoSplit {
+ federationRepoSplit[idx] = strings.TrimSpace(repo)
+ }
+
+ if _, _, err := federation.StoreFollowingRepoList(ctx, ctx.Repo.Repository.ID, federationRepoSplit); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "mirror":
+ if !setting.Mirror.Enabled || !repo.IsMirror || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ pullMirror, err := repo_model.GetMirrorByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err == repo_model.ErrMirrorNotExist {
+ ctx.NotFound("", nil)
+ return
+ }
+ if err != nil {
+ ctx.ServerError("GetMirrorByRepoID", err)
+ return
+ }
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.Interval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.Data["Err_Interval"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
+ return
+ }
+
+ pullMirror.EnablePrune = form.EnablePrune
+ pullMirror.Interval = interval
+ pullMirror.ScheduleNextUpdate()
+ if err := repo_model.UpdateMirror(ctx, pullMirror); err != nil {
+ ctx.ServerError("UpdateMirror", err)
+ return
+ }
+
+ u, err := git.GetRemoteURL(ctx, ctx.Repo.Repository.RepoPath(), pullMirror.GetRemoteName())
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+ if u.User != nil && form.MirrorPassword == "" && form.MirrorUsername == u.User.Username() {
+ form.MirrorPassword, _ = u.User.Password()
+ }
+
+ address, err := forms.ParseRemoteAddr(form.MirrorAddress, form.MirrorUsername, form.MirrorPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(address, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ if err := mirror_service.UpdateAddress(ctx, pullMirror, address); err != nil {
+ ctx.ServerError("UpdateAddress", err)
+ return
+ }
+ remoteAddress, err := util.SanitizeURL(address)
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+ pullMirror.RemoteAddress = remoteAddress
+
+ form.LFS = form.LFS && setting.LFS.StartServer
+
+ if len(form.LFSEndpoint) > 0 {
+ ep := lfs.DetermineEndpoint("", form.LFSEndpoint)
+ if ep == nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tplSettingsOptions, &form)
+ return
+ }
+ err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer)
+ if err != nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+ }
+
+ pullMirror.LFS = form.LFS
+ pullMirror.LFSEndpoint = form.LFSEndpoint
+ if err := repo_model.UpdateMirror(ctx, pullMirror); err != nil {
+ ctx.ServerError("UpdateMirror", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "mirror-sync":
+ if !setting.Mirror.Enabled || !repo.IsMirror || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ok, err := quota_model.EvaluateForUser(ctx, repo.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
+ if !ok {
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ ctx.RenderWithErr(ctx.Tr("repo.settings.pull_mirror_sync_quota_exceeded"), tplSettingsOptions, &form)
+ return
+ }
+
+ mirror_service.AddPullMirrorToQueue(repo.ID)
+
+ ctx.Flash.Info(ctx.Tr("repo.settings.pull_mirror_sync_in_progress", repo.OriginalURL))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-sync":
+ if !setting.Mirror.Enabled {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ m, err := selectPushMirrorByForm(ctx, form, repo)
+ if err != nil {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ mirror_service.AddPushMirrorToQueue(m.ID)
+
+ ctx.Flash.Info(ctx.Tr("repo.settings.push_mirror_sync_in_progress", m.RemoteAddress))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-update":
+ if !setting.Mirror.Enabled || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.PushMirrorInterval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &forms.RepoSettingForm{})
+ return
+ }
+
+ id, err := strconv.ParseInt(form.PushMirrorID, 10, 64)
+ if err != nil {
+ ctx.ServerError("UpdatePushMirrorIntervalPushMirrorID", err)
+ return
+ }
+ m := &repo_model.PushMirror{
+ ID: id,
+ Interval: interval,
+ }
+ if err := repo_model.UpdatePushMirrorInterval(ctx, m); err != nil {
+ ctx.ServerError("UpdatePushMirrorInterval", err)
+ return
+ }
+		// Background: as with the `push-mirror-sync` action above, pushing the mirror to the
+		// queue is what triggers an update, so after changing the interval the mirror is
+		// enqueued again to make sure the new schedule takes effect.
+ mirror_service.AddPushMirrorToQueue(m.ID)
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-remove":
+ if !setting.Mirror.Enabled || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ m, err := selectPushMirrorByForm(ctx, form, repo)
+ if err != nil {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ if err = mirror_service.RemovePushMirrorRemote(ctx, m); err != nil {
+ ctx.ServerError("RemovePushMirrorRemote", err)
+ return
+ }
+
+ if err = repo_model.DeletePushMirrors(ctx, repo_model.PushMirrorOptions{ID: m.ID, RepoID: m.RepoID}); err != nil {
+ ctx.ServerError("DeletePushMirrorByID", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-add":
+ if setting.Mirror.DisableNewPush || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.PushMirrorInterval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.Data["Err_PushMirrorInterval"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
+ return
+ }
+
+ if form.PushMirrorUseSSH && (form.PushMirrorUsername != "" || form.PushMirrorPassword != "") {
+ ctx.Data["Err_PushMirrorUseSSH"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_denied_combination"), tplSettingsOptions, &form)
+ return
+ }
+
+ if form.PushMirrorUseSSH && !git.HasSSHExecutable {
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_use_ssh.not_available"), tplSettingsOptions, &form)
+ return
+ }
+
+ address, err := forms.ParseRemoteAddr(form.PushMirrorAddress, form.PushMirrorUsername, form.PushMirrorPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(address, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_PushMirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ remoteSuffix, err := util.CryptoRandomString(10)
+ if err != nil {
+ ctx.ServerError("RandomString", err)
+ return
+ }
+
+ remoteAddress, err := util.SanitizeURL(address)
+ if err != nil {
+ ctx.Data["Err_PushMirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ m := &repo_model.PushMirror{
+ RepoID: repo.ID,
+ Repo: repo,
+ RemoteName: fmt.Sprintf("remote_mirror_%s", remoteSuffix),
+ SyncOnCommit: form.PushMirrorSyncOnCommit,
+ Interval: interval,
+ RemoteAddress: remoteAddress,
+ }
+
+ var plainPrivateKey []byte
+ if form.PushMirrorUseSSH {
+ publicKey, privateKey, err := util.GenerateSSHKeypair()
+ if err != nil {
+ ctx.ServerError("GenerateSSHKeypair", err)
+ return
+ }
+ plainPrivateKey = privateKey
+ m.PublicKey = string(publicKey)
+ }
+
+ if err := db.Insert(ctx, m); err != nil {
+ ctx.ServerError("InsertPushMirror", err)
+ return
+ }
+
+ if form.PushMirrorUseSSH {
+ if err := m.SetPrivatekey(ctx, plainPrivateKey); err != nil {
+ ctx.ServerError("SetPrivatekey", err)
+ return
+ }
+ }
+
+ if err := mirror_service.AddPushMirrorRemote(ctx, m, address); err != nil {
+ if err := repo_model.DeletePushMirrors(ctx, repo_model.PushMirrorOptions{ID: m.ID, RepoID: m.RepoID}); err != nil {
+ log.Error("DeletePushMirrors %v", err)
+ }
+ ctx.ServerError("AddPushMirrorRemote", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "signing":
+ changed := false
+ trustModel := repo_model.ToTrustModel(form.TrustModel)
+ if trustModel != repo.TrustModel {
+ repo.TrustModel = trustModel
+ changed = true
+ }
+
+ if changed {
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ }
+ log.Trace("Repository signing settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "admin":
+ if !ctx.Doer.IsAdmin {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if repo.IsFsckEnabled != form.EnableHealthCheck {
+ repo.IsFsckEnabled = form.EnableHealthCheck
+ }
+
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+
+ log.Trace("Repository admin settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "admin_index":
+ if !ctx.Doer.IsAdmin {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ switch form.RequestReindexType {
+ case "stats":
+ if err := stats.UpdateRepoIndexer(ctx.Repo.Repository); err != nil {
+				ctx.ServerError("UpdateStatsRepoIndexer", err)
+ return
+ }
+ case "code":
+ if !setting.Indexer.RepoIndexerEnabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ code.UpdateRepoIndexer(ctx.Repo.Repository)
+ default:
+ ctx.NotFound("", nil)
+ return
+ }
+
+ log.Trace("Repository reindex for %s requested: %s/%s", form.RequestReindexType, ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.reindex_requested"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "convert":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if !repo.IsMirror {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ repo.IsMirror = false
+
+ if _, err := repo_service.CleanUpMigrateInfo(ctx, repo); err != nil {
+ ctx.ServerError("CleanUpMigrateInfo", err)
+ return
+ } else if err = repo_model.DeleteMirrorByRepoID(ctx, ctx.Repo.Repository.ID); err != nil {
+ ctx.ServerError("DeleteMirrorByRepoID", err)
+ return
+ }
+ log.Trace("Repository converted from mirror to regular: %s", repo.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.convert_succeed"))
+ ctx.Redirect(repo.Link())
+
+ case "convert_fork":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if err := repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("Convert Fork", err)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if !repo.IsFork {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ if !ctx.Repo.Owner.CanCreateRepo() {
+ maxCreationLimit := ctx.Repo.Owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.Flash.Error(msg)
+ ctx.Redirect(repo.Link() + "/settings")
+ return
+ }
+
+ if err := repo_service.ConvertForkToNormalRepository(ctx, repo); err != nil {
+ log.Error("Unable to convert repository %-v from fork. Error: %v", repo, err)
+ ctx.ServerError("Convert Fork", err)
+ return
+ }
+
+ log.Trace("Repository converted from fork to regular: %s", repo.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.convert_fork_succeed"))
+ ctx.Redirect(repo.Link())
+
+ case "transfer":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ newOwner, err := user_model.GetUserByName(ctx, ctx.FormString("new_owner_name"))
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
+ return
+ }
+ ctx.ServerError("IsUserExist", err)
+ return
+ }
+
+ if newOwner.Type == user_model.UserTypeOrganization {
+ if !ctx.Doer.IsAdmin && newOwner.Visibility == structs.VisibleTypePrivate && !organization.OrgFromUser(newOwner).HasMemberWithUserID(ctx, ctx.Doer.ID) {
+ // The user shouldn't know about this organization
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
+ return
+ }
+ }
+
+ // Check the quota of the new owner
+ ok, err := quota_model.EvaluateForUser(ctx, newOwner.ID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
+ if !ok {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.transfer_quota_exceeded", newOwner.Name), tplSettingsOptions, &form)
+ return
+ }
+
+ // Close the GitRepo if open
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+
+ oldFullname := repo.FullName()
+ if err := repo_service.StartRepositoryTransfer(ctx, ctx.Doer, newOwner, repo, nil); err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_blocked_doer"), tplSettingsOptions, nil)
+ } else if repo_model.IsErrRepoAlreadyExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplSettingsOptions, nil)
+ } else if models.IsErrRepoTransferInProgress(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.transfer_in_progress"), tplSettingsOptions, nil)
+ } else {
+ ctx.ServerError("TransferOwnership", err)
+ }
+
+ return
+ }
+
+ if ctx.Repo.Repository.Status == repo_model.RepositoryPendingTransfer {
+ log.Trace("Repository transfer process was started: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newOwner)
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_started", newOwner.DisplayName()))
+ } else {
+ log.Trace("Repository transferred: %s -> %s", oldFullname, ctx.Repo.Repository.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_succeed"))
+ }
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "cancel_transfer":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ repoTransfer, err := models.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
+ if err != nil {
+ if models.IsErrNoPendingTransfer(err) {
+				ctx.Flash.Error(ctx.Tr("repo.settings.transfer_abort_invalid"))
+ ctx.Redirect(repo.Link() + "/settings")
+ } else {
+ ctx.ServerError("GetPendingRepositoryTransfer", err)
+ }
+
+ return
+ }
+
+ if err := repoTransfer.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadRecipient", err)
+ return
+ }
+
+ if err := repo_service.CancelRepositoryTransfer(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("CancelRepositoryTransfer", err)
+ return
+ }
+
+ log.Trace("Repository transfer process was cancelled: %s/%s ", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_abort_success", repoTransfer.Recipient.Name))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "delete":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+		// Close the git repository before doing this.
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ }
+
+ if err := repo_service.DeleteRepository(ctx, ctx.Doer, ctx.Repo.Repository, true); err != nil {
+ ctx.ServerError("DeleteRepository", err)
+ return
+ }
+ log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success"))
+ ctx.Redirect(ctx.Repo.Owner.DashboardLink())
+
+ case "delete-wiki":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ err := wiki_service.DeleteWiki(ctx, repo)
+ if err != nil {
+ log.Error("Delete Wiki: %v", err.Error())
+ }
+ log.Trace("Repository wiki deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.wiki_deletion_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "rename-wiki-branch":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if err := wiki_service.NormalizeWikiBranch(ctx, repo, setting.Repository.DefaultBranch); err != nil {
+ log.Error("Normalize Wiki branch: %v", err.Error())
+ ctx.Flash.Error(ctx.Tr("repo.settings.wiki_branch_rename_failure"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+ log.Trace("Repository wiki normalized: %s#%s", repo.FullName(), setting.Repository.DefaultBranch)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.wiki_branch_rename_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "archive":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if repo.IsMirror {
+ ctx.Flash.Error(ctx.Tr("repo.settings.archive.error_ismirror"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if err := repo_model.SetArchiveRepoState(ctx, repo, true); err != nil {
+ log.Error("Tried to archive a repo: %s", err)
+ ctx.Flash.Error(ctx.Tr("repo.settings.archive.error"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.archive.success"))
+
+ log.Trace("Repository was archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "unarchive":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if err := repo_model.SetArchiveRepoState(ctx, repo, false); err != nil {
+ log.Error("Tried to unarchive a repo: %s", err)
+ ctx.Flash.Error(ctx.Tr("repo.settings.unarchive.error"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if ctx.Repo.Repository.UnitEnabled(ctx, unit_model.TypeActions) {
+ if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil {
+ log.Error("DetectAndHandleSchedules for un-archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.unarchive.success"))
+
+ log.Trace("Repository was un-archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ default:
+ ctx.NotFound("", nil)
+ }
+}
+
+func handleSettingRemoteAddrError(ctx *context.Context, err error, form *forms.RepoSettingForm) {
+ if models.IsErrInvalidCloneAddr(err) {
+ addrErr := err.(*models.ErrInvalidCloneAddr)
+ switch {
+ case addrErr.IsProtocolInvalid:
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_protocol_invalid"), tplSettingsOptions, form)
+ case addrErr.IsURLError:
+ ctx.RenderWithErr(ctx.Tr("form.url_error", addrErr.Host), tplSettingsOptions, form)
+ case addrErr.IsPermissionDenied:
+ if addrErr.LocalPath {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied"), tplSettingsOptions, form)
+ } else {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied_blocked"), tplSettingsOptions, form)
+ }
+ case addrErr.IsInvalidPath:
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_local_path"), tplSettingsOptions, form)
+ default:
+ ctx.ServerError("Unknown error", err)
+ }
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, form)
+}
+
+func selectPushMirrorByForm(ctx *context.Context, form *forms.RepoSettingForm, repo *repo_model.Repository) (*repo_model.PushMirror, error) {
+ id, err := strconv.ParseInt(form.PushMirrorID, 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, repo.ID, db.ListOptions{})
+ if err != nil {
+ return nil, err
+ }
+
+ for _, m := range pushMirrors {
+ if m.ID == id {
+ m.Repo = repo
+ return m, nil
+ }
+ }
+
+ return nil, fmt.Errorf("PushMirror[%v] not associated to repository %v", id, repo)
+}
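
A standalone sketch of the mirror interval validation used by the "mirror", "push-mirror-update" and "push-mirror-add" actions above: an interval of zero is accepted (periodic sync disabled), anything else must be at least the configured minimum. validateMirrorInterval and the ten-minute minimum are illustrative assumptions, not Forgejo settings.

    package main

    import (
    	"fmt"
    	"time"
    )

    // validateMirrorInterval parses a form value and enforces the minimum interval,
    // treating zero as "no periodic sync".
    func validateMirrorInterval(raw string, minInterval time.Duration) (time.Duration, error) {
    	interval, err := time.ParseDuration(raw)
    	if err != nil {
    		return 0, fmt.Errorf("invalid interval %q: %w", raw, err)
    	}
    	if interval != 0 && interval < minInterval {
    		return 0, fmt.Errorf("interval %s is below the minimum of %s", interval, minInterval)
    	}
    	return interval, nil
    }

    func main() {
    	for _, raw := range []string{"8h", "0", "30s", "soon"} {
    		interval, err := validateMirrorInterval(raw, 10*time.Minute)
    		fmt.Println(raw, interval, err)
    	}
    }
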
diff --git a/routers/web/repo/setting/settings_test.go b/routers/web/repo/setting/settings_test.go
new file mode 100644
index 0000000..0c8553f
--- /dev/null
+++ b/routers/web/repo/setting/settings_test.go
@@ -0,0 +1,412 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+ "testing"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createSSHAuthorizedKeysTmpPath(t *testing.T) func() {
+ tmpDir := t.TempDir()
+
+ oldPath := setting.SSH.RootPath
+ setting.SSH.RootPath = tmpDir
+
+ return func() {
+ setting.SSH.RootPath = oldPath
+ }
+}
+
+func TestAddReadOnlyDeployKey(t *testing.T) {
+ if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
+ defer deferable()
+ } else {
+ return
+ }
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/settings/keys")
+
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+
+ addKeyForm := forms.AddKeyForm{
+ Title: "read-only",
+ Content: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
+ }
+ web.SetForm(ctx, &addKeyForm)
+ DeployKeysPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
+ Name: addKeyForm.Title,
+ Content: addKeyForm.Content,
+ Mode: perm.AccessModeRead,
+ })
+}
+
+func TestAddReadWriteOnlyDeployKey(t *testing.T) {
+ if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
+ defer deferable()
+ } else {
+ return
+ }
+
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/settings/keys")
+
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+
+ addKeyForm := forms.AddKeyForm{
+ Title: "read-write",
+ Content: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
+ IsWritable: true,
+ }
+ web.SetForm(ctx, &addKeyForm)
+ DeployKeysPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
+ Name: addKeyForm.Title,
+ Content: addKeyForm.Content,
+ Mode: perm.AccessModeWrite,
+ })
+}
+
+func TestCollaborationPost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 4)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user4")
+
+ u := &user_model.User{
+ ID: 2,
+ LowerName: "user2",
+ Type: user_model.UserTypeIndividual,
+ }
+
+ re := &repo_model.Repository{
+ ID: 2,
+ Owner: u,
+ OwnerID: u.ID,
+ }
+
+ repo := &context.Repository{
+ Owner: u,
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
+ require.NoError(t, err)
+ assert.True(t, exists)
+}
+
+func TestCollaborationPost_InactiveUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 9)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user9")
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ LowerName: "user2",
+ },
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 4)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user4")
+
+ u := &user_model.User{
+ ID: 2,
+ LowerName: "user2",
+ Type: user_model.UserTypeIndividual,
+ }
+
+ re := &repo_model.Repository{
+ ID: 2,
+ Owner: u,
+ OwnerID: u.ID,
+ }
+
+ repo := &context.Repository{
+ Owner: u,
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
+ require.NoError(t, err)
+ assert.True(t, exists)
+
+ // Try adding the same collaborator again
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestCollaborationPost_NonExistentUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user34")
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ LowerName: "user2",
+ },
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Empty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_NotAllowed(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: false,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ assert.False(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_AddTeamTwice(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ AddTeamPost(ctx)
+ assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_NonExistentTeam(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team-non-existent")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestDeleteTeam(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org3/team1/repo3")
+
+ ctx.Req.Form.Set("id", "2")
+
+ org := &user_model.User{
+ LowerName: "org3",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 2,
+ OrgID: 3,
+ }
+
+ re := &repo_model.Repository{
+ ID: 3,
+ Owner: org,
+ OwnerID: 3,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 3,
+ LowerName: "org3",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ DeleteTeam(ctx)
+
+ assert.False(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+}
diff --git a/routers/web/repo/setting/variables.go b/routers/web/repo/setting/variables.go
new file mode 100644
index 0000000..45b6c0f
--- /dev/null
+++ b/routers/web/repo/setting/variables.go
@@ -0,0 +1,140 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared "code.gitea.io/gitea/routers/web/shared/actions"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplRepoVariables base.TplName = "repo/settings/actions"
+ tplOrgVariables base.TplName = "org/settings/actions"
+ tplUserVariables base.TplName = "user/settings/actions"
+ tplAdminVariables base.TplName = "admin/actions"
+)
+
+type variablesCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsUser bool
+ IsGlobal bool
+ VariablesTemplate base.TplName
+ RedirectLink string
+}
+
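+// getVariablesCtx determines whether the variables page is scoped to a repository, an organization, a user or the global admin settings.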
+func getVariablesCtx(ctx *context.Context) (*variablesCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &variablesCtx{
+ OwnerID: 0,
+ RepoID: ctx.Repo.Repository.ID,
+ IsRepo: true,
+ VariablesTemplate: tplRepoVariables,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &variablesCtx{
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: 0,
+ IsOrg: true,
+ VariablesTemplate: tplOrgVariables,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &variablesCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ VariablesTemplate: tplUserVariables,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &variablesCtx{
+ OwnerID: 0,
+ RepoID: 0,
+ IsGlobal: true,
+ VariablesTemplate: tplAdminVariables,
+ RedirectLink: setting.AppSubURL + "/admin/actions/variables",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Variables context")
+}
+
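+// Variables renders the variables management page for the resolved scope.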
+func Variables(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.variables")
+ ctx.Data["PageType"] = "variables"
+ ctx.Data["PageIsSharedSettingsVariables"] = true
+
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ shared.SetVariablesContext(ctx, vCtx.OwnerID, vCtx.RepoID)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, vCtx.VariablesTemplate)
+}
+
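+// VariableCreate creates a new variable for the resolved scope.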
+func VariableCreate(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ if ctx.HasError() { // form binding validation error
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.CreateVariable(ctx, vCtx.OwnerID, vCtx.RepoID, vCtx.RedirectLink)
+}
+
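+// VariableUpdate updates an existing variable for the resolved scope.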
+func VariableUpdate(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ if ctx.HasError() { // form binding validation error
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.UpdateVariable(ctx, vCtx.RedirectLink)
+}
+
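+// VariableDelete deletes a variable for the resolved scope.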
+func VariableDelete(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+ shared.DeleteVariable(ctx, vCtx.RedirectLink)
+}
diff --git a/routers/web/repo/setting/webhook.go b/routers/web/repo/setting/webhook.go
new file mode 100644
index 0000000..eee493e
--- /dev/null
+++ b/routers/web/repo/setting/webhook.go
@@ -0,0 +1,485 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ webhook_service "code.gitea.io/gitea/services/webhook"
+
+ "gitea.com/go-chi/binding"
+)
+
+const (
+ tplHooks base.TplName = "repo/settings/webhook/base"
+ tplHookNew base.TplName = "repo/settings/webhook/new"
+ tplOrgHookNew base.TplName = "org/settings/hook_new"
+ tplUserHookNew base.TplName = "user/settings/hook_new"
+ tplAdminHookNew base.TplName = "admin/hook_new"
+)
+
+// WebhookList renders the webhook list page
+func WebhookList(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.hooks")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["BaseLink"] = ctx.Repo.RepoLink + "/settings/hooks"
+ ctx.Data["BaseLinkNew"] = ctx.Repo.RepoLink + "/settings/hooks"
+ ctx.Data["WebhookList"] = webhook_service.List()
+ ctx.Data["Description"] = ctx.Tr("repo.settings.hooks_desc", "https://forgejo.org/docs/latest/user/webhooks/")
+
+ ws, err := db.Find[webhook.Webhook](ctx, webhook.ListWebhookOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("GetWebhooksByRepoID", err)
+ return
+ }
+ ctx.Data["Webhooks"] = ws
+
+ ctx.HTML(http.StatusOK, tplHooks)
+}
+
+type ownerRepoCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsAdmin bool
+ IsSystemWebhook bool
+ Link string
+ LinkNew string
+ NewTemplate base.TplName
+}
+
+// getOwnerRepoCtx determines whether this is a repo, owner, or admin (both default and system) context.
+func getOwnerRepoCtx(ctx *context.Context) (*ownerRepoCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &ownerRepoCtx{
+ RepoID: ctx.Repo.Repository.ID,
+ Link: path.Join(ctx.Repo.RepoLink, "settings/hooks"),
+ LinkNew: path.Join(ctx.Repo.RepoLink, "settings/hooks"),
+ NewTemplate: tplHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ return &ownerRepoCtx{
+ OwnerID: ctx.ContextUser.ID,
+ Link: path.Join(ctx.Org.OrgLink, "settings/hooks"),
+ LinkNew: path.Join(ctx.Org.OrgLink, "settings/hooks"),
+ NewTemplate: tplOrgHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &ownerRepoCtx{
+ OwnerID: ctx.Doer.ID,
+ Link: path.Join(setting.AppSubURL, "/user/settings/hooks"),
+ LinkNew: path.Join(setting.AppSubURL, "/user/settings/hooks"),
+ NewTemplate: tplUserHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &ownerRepoCtx{
+ IsAdmin: true,
+ IsSystemWebhook: ctx.Params(":configType") == "system-hooks",
+ Link: path.Join(setting.AppSubURL, "/admin/hooks"),
+ LinkNew: path.Join(setting.AppSubURL, "/admin/", ctx.Params(":configType")),
+ NewTemplate: tplAdminHookNew,
+ }, nil
+ }
+
+ return nil, errors.New("unable to set OwnerRepo context")
+}
+
+// WebhookNew renders the page for creating a new webhook
+func WebhookNew(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.add_webhook")
+ ctx.Data["Webhook"] = webhook.Webhook{HookEvent: &webhook_module.HookEvent{}}
+
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return
+ }
+
+ if orCtx.IsAdmin && orCtx.IsSystemWebhook {
+ ctx.Data["PageIsAdminSystemHooks"] = true
+ ctx.Data["PageIsAdminSystemHooksNew"] = true
+ } else if orCtx.IsAdmin {
+ ctx.Data["PageIsAdminDefaultHooks"] = true
+ ctx.Data["PageIsAdminDefaultHooksNew"] = true
+ } else {
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksNew"] = true
+ }
+
+ hookType := ctx.Params(":type")
+ handler := webhook_service.GetWebhookHandler(hookType)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+ ctx.Data["HookType"] = hookType
+ ctx.Data["WebhookHandler"] = handler
+ ctx.Data["BaseLink"] = orCtx.LinkNew
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ ctx.HTML(http.StatusOK, orCtx.NewTemplate)
+}
+
+// ParseHookEvent converts the web form content to a webhook.HookEvent
+func ParseHookEvent(form forms.WebhookCoreForm) *webhook_module.HookEvent {
+ return &webhook_module.HookEvent{
+ PushOnly: form.PushOnly(),
+ SendEverything: form.SendEverything(),
+ ChooseEvents: form.ChooseEvents(),
+ HookEvents: webhook_module.HookEvents{
+ Create: form.Create,
+ Delete: form.Delete,
+ Fork: form.Fork,
+ Issues: form.Issues,
+ IssueAssign: form.IssueAssign,
+ IssueLabel: form.IssueLabel,
+ IssueMilestone: form.IssueMilestone,
+ IssueComment: form.IssueComment,
+ Release: form.Release,
+ Push: form.Push,
+ PullRequest: form.PullRequest,
+ PullRequestAssign: form.PullRequestAssign,
+ PullRequestLabel: form.PullRequestLabel,
+ PullRequestMilestone: form.PullRequestMilestone,
+ PullRequestComment: form.PullRequestComment,
+ PullRequestReview: form.PullRequestReview,
+ PullRequestSync: form.PullRequestSync,
+ PullRequestReviewRequest: form.PullRequestReviewRequest,
+ Wiki: form.Wiki,
+ Repository: form.Repository,
+ Package: form.Package,
+ },
+ BranchFilter: form.BranchFilter,
+ }
+}
+
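+// WebhookCreate validates the submitted form and creates a new webhook for the current repository, owner or admin scope.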
+func WebhookCreate(ctx *context.Context) {
+ hookType := ctx.Params(":type")
+ handler := webhook_service.GetWebhookHandler(hookType)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+
+ fields := handler.UnmarshalForm(func(form any) {
+ errs := binding.Bind(ctx.Req, form)
+ middleware.Validate(errs, ctx.Data, form, ctx.Locale) // error checked below in ctx.HasError
+ })
+
+ ctx.Data["Title"] = ctx.Tr("repo.settings.add_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksNew"] = true
+ ctx.Data["Webhook"] = webhook.Webhook{HookEvent: &webhook_module.HookEvent{}}
+ ctx.Data["HookType"] = hookType
+ ctx.Data["WebhookHandler"] = handler
+
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return
+ }
+ ctx.Data["BaseLink"] = orCtx.LinkNew
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ if ctx.HasError() {
+ // pre-fill the form with the submitted data
+ var w webhook.Webhook
+ w.URL = fields.URL
+ w.ContentType = fields.ContentType
+ w.Secret = fields.Secret
+ w.HookEvent = ParseHookEvent(fields.WebhookCoreForm)
+ w.IsActive = fields.Active
+ w.HTTPMethod = fields.HTTPMethod
+ err := w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+ ctx.Data["Webhook"] = w
+ ctx.Data["HookMetadata"] = fields.Metadata
+
+ ctx.HTML(http.StatusUnprocessableEntity, orCtx.NewTemplate)
+ return
+ }
+
+ var meta []byte
+ if fields.Metadata != nil {
+ meta, err = json.Marshal(fields.Metadata)
+ if err != nil {
+ ctx.ServerError("Marshal", err)
+ return
+ }
+ }
+
+ w := &webhook.Webhook{
+ RepoID: orCtx.RepoID,
+ URL: fields.URL,
+ HTTPMethod: fields.HTTPMethod,
+ ContentType: fields.ContentType,
+ Secret: fields.Secret,
+ HookEvent: ParseHookEvent(fields.WebhookCoreForm),
+ IsActive: fields.Active,
+ Type: hookType,
+ Meta: string(meta),
+ OwnerID: orCtx.OwnerID,
+ IsSystemWebhook: orCtx.IsSystemWebhook,
+ }
+ err = w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+ if err := w.UpdateEvent(); err != nil {
+ ctx.ServerError("UpdateEvent", err)
+ return
+ } else if err := webhook.CreateWebhook(ctx, w); err != nil {
+ ctx.ServerError("CreateWebhook", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_hook_success"))
+ ctx.Redirect(orCtx.Link)
+}
+
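+// WebhookUpdate validates the submitted form and updates an existing webhook.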
+func WebhookUpdate(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.update_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksEdit"] = true
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Webhook"] = w
+
+ handler := webhook_service.GetWebhookHandler(w.Type)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+
+ fields := handler.UnmarshalForm(func(form any) {
+ errs := binding.Bind(ctx.Req, form)
+ middleware.Validate(errs, ctx.Data, form, ctx.Locale) // error checked below in ctx.HasError
+ })
+
+ // pre-fill the form with the submitted data
+ w.URL = fields.URL
+ w.ContentType = fields.ContentType
+ w.Secret = fields.Secret
+ w.HookEvent = ParseHookEvent(fields.WebhookCoreForm)
+ w.IsActive = fields.Active
+ w.HTTPMethod = fields.HTTPMethod
+
+ err := w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Data["HookMetadata"] = fields.Metadata
+ ctx.HTML(http.StatusUnprocessableEntity, orCtx.NewTemplate)
+ return
+ }
+
+ var meta []byte
+ if fields.Metadata != nil {
+ meta, err = json.Marshal(fields.Metadata)
+ if err != nil {
+ ctx.ServerError("Marshal", err)
+ return
+ }
+ }
+
+ w.Meta = string(meta)
+
+ if err := w.UpdateEvent(); err != nil {
+ ctx.ServerError("UpdateEvent", err)
+ return
+ } else if err := webhook.UpdateWebhook(ctx, w); err != nil {
+ ctx.ServerError("UpdateWebhook", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_hook_success"))
+ ctx.Redirect(fmt.Sprintf("%s/%d", orCtx.Link, w.ID))
+}
+
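+// checkWebhook loads the webhook referenced by the ":id" parameter for the current repository, owner or admin scope and fills the shared template data.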
+func checkWebhook(ctx *context.Context) (*ownerRepoCtx, *webhook.Webhook) {
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return nil, nil
+ }
+ ctx.Data["BaseLink"] = orCtx.Link
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ var w *webhook.Webhook
+ if orCtx.RepoID > 0 {
+ w, err = webhook.GetWebhookByRepoID(ctx, orCtx.RepoID, ctx.ParamsInt64(":id"))
+ } else if orCtx.OwnerID > 0 {
+ w, err = webhook.GetWebhookByOwnerID(ctx, orCtx.OwnerID, ctx.ParamsInt64(":id"))
+ } else if orCtx.IsAdmin {
+ w, err = webhook.GetSystemOrDefaultWebhook(ctx, ctx.ParamsInt64(":id"))
+ }
+ if err != nil || w == nil {
+ if webhook.IsErrWebhookNotExist(err) {
+ ctx.NotFound("GetWebhookByID", nil)
+ } else {
+ ctx.ServerError("GetWebhookByID", err)
+ }
+ return nil, nil
+ }
+
+ ctx.Data["HookType"] = w.Type
+
+ if handler := webhook_service.GetWebhookHandler(w.Type); handler != nil {
+ ctx.Data["HookMetadata"] = handler.Metadata(w)
+ ctx.Data["WebhookHandler"] = handler
+ }
+
+ ctx.Data["History"], err = w.History(ctx, 1)
+ if err != nil {
+ ctx.ServerError("History", err)
+ }
+ return orCtx, w
+}
+
+// WebhookEdit renders the webhook editing page
+func WebhookEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.update_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksEdit"] = true
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Webhook"] = w
+
+ ctx.HTML(http.StatusOK, orCtx.NewTemplate)
+}
+
+// WebhookTest tests whether the webhook works correctly by delivering a fake push payload
+func WebhookTest(ctx *context.Context) {
+ hookID := ctx.ParamsInt64(":id")
+ w, err := webhook.GetWebhookByRepoID(ctx, ctx.Repo.Repository.ID, hookID)
+ if err != nil {
+ ctx.Flash.Error("GetWebhookByRepoID: " + err.Error())
+ ctx.Status(http.StatusInternalServerError)
+ return
+ }
+
+	// Grab the latest commit, or fake one if the repository is empty.
+ commit := ctx.Repo.Commit
+ if commit == nil {
+ ghost := user_model.NewGhostUser()
+ objectFormat := git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName)
+ commit = &git.Commit{
+ ID: objectFormat.EmptyObjectID(),
+ Author: ghost.NewGitSig(),
+ Committer: ghost.NewGitSig(),
+ CommitMessage: "This is a fake commit",
+ }
+ }
+
+ apiUser := convert.ToUserWithAccessMode(ctx, ctx.Doer, perm.AccessModeNone)
+
+ apiCommit := &api.PayloadCommit{
+ ID: commit.ID.String(),
+ Message: commit.Message(),
+ URL: ctx.Repo.Repository.HTMLURL() + "/commit/" + url.PathEscape(commit.ID.String()),
+ Author: &api.PayloadUser{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Committer: &api.PayloadUser{
+ Name: commit.Committer.Name,
+ Email: commit.Committer.Email,
+ },
+ }
+
+ commitID := commit.ID.String()
+ p := &api.PushPayload{
+ Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch,
+ Before: commitID,
+ After: commitID,
+ CompareURL: setting.AppURL + ctx.Repo.Repository.ComposeCompareURL(commitID, commitID),
+ Commits: []*api.PayloadCommit{apiCommit},
+ TotalCommits: 1,
+ HeadCommit: apiCommit,
+ Repo: convert.ToRepo(ctx, ctx.Repo.Repository, access_model.Permission{AccessMode: perm.AccessModeNone}),
+ Pusher: apiUser,
+ Sender: apiUser,
+ }
+ if err := webhook_service.PrepareWebhook(ctx, w, webhook_module.HookEventPush, p); err != nil {
+ ctx.Flash.Error("PrepareWebhook: " + err.Error())
+ ctx.Status(http.StatusInternalServerError)
+ } else {
+ ctx.Flash.Info(ctx.Tr("repo.settings.webhook.delivery.success"))
+ ctx.Status(http.StatusOK)
+ }
+}
+
+// WebhookReplay replays a webhook
+func WebhookReplay(ctx *context.Context) {
+ hookTaskUUID := ctx.Params(":uuid")
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := webhook_service.ReplayHookTask(ctx, w, hookTaskUUID); err != nil {
+ if webhook.IsErrHookTaskNotExist(err) {
+ ctx.NotFound("ReplayHookTask", nil)
+ } else {
+ ctx.ServerError("ReplayHookTask", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook.delivery.success"))
+ ctx.Redirect(fmt.Sprintf("%s/%d", orCtx.Link, w.ID))
+}
+
+// WebhookDelete deletes a webhook
+func WebhookDelete(ctx *context.Context) {
+ if err := webhook.DeleteWebhookByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteWebhookByRepoID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/hooks")
+}
diff --git a/routers/web/repo/topic.go b/routers/web/repo/topic.go
new file mode 100644
index 0000000..d81a695
--- /dev/null
+++ b/routers/web/repo/topic.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+// TopicsPost handles updating a repository's topics
+func TopicsPost(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]any{
+			"message": "Only owners can change the topics.",
+ })
+ return
+ }
+
+ topics := make([]string, 0)
+ topicsStr := ctx.FormTrim("topics")
+ if len(topicsStr) > 0 {
+ topics = strings.Split(topicsStr, ",")
+ }
+
+ validTopics, invalidTopics := repo_model.SanitizeAndValidateTopics(topics)
+
+ if len(validTopics) > 25 {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]any{
+ "invalidTopics": nil,
+ "message": ctx.Tr("repo.topic.count_prompt"),
+ })
+ return
+ }
+
+ if len(invalidTopics) > 0 {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]any{
+ "invalidTopics": invalidTopics,
+ "message": ctx.Tr("repo.topic.format_prompt"),
+ })
+ return
+ }
+
+ err := repo_model.SaveTopics(ctx, ctx.Repo.Repository.ID, validTopics...)
+ if err != nil {
+ log.Error("SaveTopics failed: %v", err)
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "message": "Save topics failed.",
+ })
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "status": "ok",
+ })
+}
diff --git a/routers/web/repo/treelist.go b/routers/web/repo/treelist.go
new file mode 100644
index 0000000..d11af46
--- /dev/null
+++ b/routers/web/repo/treelist.go
@@ -0,0 +1,54 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/go-enry/go-enry/v2"
+)
+
+// TreeList returns all file entries of a repository
+func TreeList(ctx *context.Context) {
+ tree, err := ctx.Repo.Commit.SubTree("/")
+ if err != nil {
+ ctx.ServerError("Repo.Commit.SubTree", err)
+ return
+ }
+
+ entries, err := tree.ListEntriesRecursiveFast()
+ if err != nil {
+ ctx.ServerError("ListEntriesRecursiveFast", err)
+ return
+ }
+ entries.CustomSort(base.NaturalSortLess)
+
+ files := make([]string, 0, len(entries))
+ for _, entry := range entries {
+ if !isExcludedEntry(entry) {
+ files = append(files, entry.Name())
+ }
+ }
+ ctx.JSON(http.StatusOK, files)
+}
+
+func isExcludedEntry(entry *git.TreeEntry) bool {
+ if entry.IsDir() {
+ return true
+ }
+
+ if entry.IsSubModule() {
+ return true
+ }
+
+ if enry.IsVendor(entry.Name()) {
+ return true
+ }
+
+ return false
+}
diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go
new file mode 100644
index 0000000..f1445c5
--- /dev/null
+++ b/routers/web/repo/view.go
@@ -0,0 +1,1258 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ gocontext "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "html/template"
+ "image"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "slices"
+ "strings"
+ "time"
+
+ _ "image/gif" // for processing gif images
+ _ "image/jpeg" // for processing jpeg images
+ _ "image/png" // for processing png images
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ admin_model "code.gitea.io/gitea/models/admin"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issue_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/highlight"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/svg"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ files_service "code.gitea.io/gitea/services/repository/files"
+
+ "github.com/nektos/act/pkg/model"
+
+ _ "golang.org/x/image/bmp" // for processing bmp images
+ _ "golang.org/x/image/webp" // for processing webp images
+)
+
+const (
+ tplRepoEMPTY base.TplName = "repo/empty"
+ tplRepoHome base.TplName = "repo/home"
+ tplRepoViewList base.TplName = "repo/view_list"
+ tplWatchers base.TplName = "repo/watchers"
+ tplForks base.TplName = "repo/forks"
+ tplMigrating base.TplName = "repo/migrate/migrating"
+)
+
+// FindReadmeFileInEntries locates a README for a tree in one of the supported paths.
+//
+// entries is passed to reduce calls to ListEntries(), so
+// this has precondition:
+//
+// entries == ctx.Repo.Commit.SubTree(ctx.Repo.TreePath).ListEntries()
+//
+// FIXME: There has to be a more efficient way of doing this
+func FindReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
+ // Create a list of extensions in priority order
+ // 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
+ // 2. Org-Mode files - with and without localisation - e.g. README.en-us.org or README.org
+ // 3. Txt files - e.g. README.txt
+ // 4. No extension - e.g. README
+ exts := append(append(localizedExtensions(".md", ctx.Locale.Language()), localizedExtensions(".org", ctx.Locale.Language())...), ".txt", "") // sorted by priority
+ extCount := len(exts)
+ readmeFiles := make([]*git.TreeEntry, extCount+1)
+
+	docsEntries := make([]*git.TreeEntry, 3) // (one of docs/, .forgejo/ or .gitea/, and .github/)
+ for _, entry := range entries {
+ if tryWellKnownDirs && entry.IsDir() {
+ // as a special case for the top-level repo introduction README,
+ // fall back to subfolders, looking for e.g. docs/README.md, .gitea/README.zh-CN.txt, .github/README.txt, ...
+ // (note that docsEntries is ignored unless we are at the root)
+ lowerName := strings.ToLower(entry.Name())
+ switch lowerName {
+ case "docs":
+ if entry.Name() == "docs" || docsEntries[0] == nil {
+ docsEntries[0] = entry
+ }
+ case ".forgejo":
+ if entry.Name() == ".forgejo" || docsEntries[1] == nil {
+ docsEntries[1] = entry
+ }
+ case ".gitea":
+ if entry.Name() == ".gitea" || docsEntries[1] == nil {
+ docsEntries[1] = entry
+ }
+ case ".github":
+ if entry.Name() == ".github" || docsEntries[2] == nil {
+ docsEntries[2] = entry
+ }
+ }
+ continue
+ }
+ if i, ok := util.IsReadmeFileExtension(entry.Name(), exts...); ok {
+ log.Debug("Potential readme file: %s", entry.Name())
+ if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].Name(), entry.Blob().Name()) {
+ if entry.IsLink() {
+ target, _, err := entry.FollowLinks()
+ if err != nil && !git.IsErrBadLink(err) {
+ return "", nil, err
+ } else if target != nil && (target.IsExecutable() || target.IsRegular()) {
+ readmeFiles[i] = entry
+ }
+ } else {
+ readmeFiles[i] = entry
+ }
+ }
+ }
+ }
+ var readmeFile *git.TreeEntry
+ for _, f := range readmeFiles {
+ if f != nil {
+ readmeFile = f
+ break
+ }
+ }
+
+ if ctx.Repo.TreePath == "" && readmeFile == nil {
+ for _, subTreeEntry := range docsEntries {
+ if subTreeEntry == nil {
+ continue
+ }
+ subTree := subTreeEntry.Tree()
+ if subTree == nil {
+ // this should be impossible; if subTreeEntry exists so should this.
+ continue
+ }
+ var err error
+ childEntries, err := subTree.ListEntries()
+ if err != nil {
+ return "", nil, err
+ }
+
+ subfolder, readmeFile, err := FindReadmeFileInEntries(ctx, childEntries, false)
+ if err != nil && !git.IsErrNotExist(err) {
+ return "", nil, err
+ }
+ if readmeFile != nil {
+ return path.Join(subTreeEntry.Name(), subfolder), readmeFile, nil
+ }
+ }
+ }
+
+ return "", readmeFile, nil
+}
+
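+// renderDirectory renders the file listing of the current tree path and, when one is found, the README below it.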
+func renderDirectory(ctx *context.Context) {
+ entries := renderDirectoryFiles(ctx, 1*time.Second)
+ if ctx.Written() {
+ return
+ }
+
+ if ctx.Repo.TreePath != "" {
+ ctx.Data["HideRepoInfo"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefName)
+ }
+
+ subfolder, readmeFile, err := FindReadmeFileInEntries(ctx, entries, true)
+ if err != nil {
+ ctx.ServerError("findReadmeFileInEntries", err)
+ return
+ }
+
+ renderReadmeFile(ctx, subfolder, readmeFile)
+}
+
+// localizedExtensions prepends the provided language code with and without a
+// regional identifier to the provided extension.
+// Note: the language code will always be lower-cased, if a region is present it must be separated with a `-`
+// Note: ext should be prefixed with a `.`
+func localizedExtensions(ext, languageCode string) (localizedExts []string) {
+ if len(languageCode) < 1 {
+ return []string{ext}
+ }
+
+ lowerLangCode := "." + strings.ToLower(languageCode)
+
+ if strings.Contains(lowerLangCode, "-") {
+ underscoreLangCode := strings.ReplaceAll(lowerLangCode, "-", "_")
+ indexOfDash := strings.Index(lowerLangCode, "-")
+ // e.g. [.zh-cn.md, .zh_cn.md, .zh.md, _zh.md, .md]
+ return []string{lowerLangCode + ext, underscoreLangCode + ext, lowerLangCode[:indexOfDash] + ext, "_" + lowerLangCode[1:indexOfDash] + ext, ext}
+ }
+
+ // e.g. [.en.md, .md]
+ return []string{lowerLangCode + ext, ext}
+}
+
+type fileInfo struct {
+ isTextFile bool
+ isLFSFile bool
+ fileSize int64
+ lfsMeta *lfs.Pointer
+ st typesniffer.SniffedType
+}
+
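+// getFileReader reads the beginning of the blob to sniff its content type; when LFS is enabled and the blob is a valid pointer, it switches to the underlying LFS object.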
+func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) ([]byte, io.ReadCloser, *fileInfo, error) {
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ isTextFile := st.IsText()
+
+ // FIXME: what happens when README file is an image?
+ if !isTextFile || !setting.LFS.StartServer {
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ pointer, _ := lfs.ReadPointerFromBuffer(buf)
+ if !pointer.IsValid() { // fallback to plain file
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, repoID, pointer.Oid)
+ if err != nil { // fallback to plain file
+ log.Warn("Unable to access LFS pointer %s in repo %d: %v", pointer.Oid, repoID, err)
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ dataRc.Close()
+
+ dataRc, err = lfs.ReadMetaObject(pointer)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ buf = make([]byte, 1024)
+ n, err = util.ReadAtMost(dataRc, buf)
+ if err != nil {
+ dataRc.Close()
+ return nil, nil, nil, err
+ }
+ buf = buf[:n]
+
+ st = typesniffer.DetectContentType(buf)
+
+ return buf, dataRc, &fileInfo{st.IsText(), true, meta.Size, &meta.Pointer, st}, nil
+}
+
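+// renderReadmeFile renders the given README entry (following symlinks) as markup or plain text and fills the related template data.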
+func renderReadmeFile(ctx *context.Context, subfolder string, readmeFile *git.TreeEntry) {
+ target := readmeFile
+ if readmeFile != nil && readmeFile.IsLink() {
+ target, _, _ = readmeFile.FollowLinks()
+ }
+ if target == nil {
+		// if FindReadmeFileInEntries() failed and/or gave us a broken symlink (which it shouldn't),
+		// simply skip rendering the README
+ return
+ }
+
+ ctx.Data["RawFileLink"] = ""
+ ctx.Data["ReadmeInList"] = true
+ ctx.Data["ReadmeExist"] = true
+ ctx.Data["FileIsSymlink"] = readmeFile.IsLink()
+
+ buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, target.Blob())
+ if err != nil {
+ ctx.ServerError("getFileReader", err)
+ return
+ }
+ defer dataRc.Close()
+
+ ctx.Data["FileIsText"] = fInfo.isTextFile
+ ctx.Data["FileName"] = path.Join(subfolder, readmeFile.Name())
+ ctx.Data["IsLFSFile"] = fInfo.isLFSFile
+
+ if fInfo.isLFSFile {
+ filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.Name()))
+ ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.Link(), url.PathEscape(fInfo.lfsMeta.Oid), url.PathEscape(filenameBase64))
+ }
+
+ if !fInfo.isTextFile {
+ return
+ }
+
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ // Pretend that this is a normal text file to display 'This file is too large to be shown'
+ ctx.Data["IsFileTooLarge"] = true
+ ctx.Data["IsTextFile"] = true
+ ctx.Data["FileSize"] = fInfo.fileSize
+ return
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+ if markupType := markup.Type(readmeFile.Name()); markupType != "" {
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: path.Join(ctx.Repo.TreePath, readmeFile.Name()), // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Join(ctx.Repo.TreePath, subfolder),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.Name(), ctx.Repo.Repository, err)
+ delete(ctx.Data, "IsMarkup")
+ }
+ }
+
+ if ctx.Data["IsMarkup"] != true {
+ ctx.Data["IsPlainText"] = true
+ content, err := io.ReadAll(rd)
+ if err != nil {
+ log.Error("Read readme content failed: %v", err)
+ }
+ contentEscaped := template.HTMLEscapeString(util.UnsafeBytesToString(content))
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlHTML(template.HTML(contentEscaped), ctx.Locale, charset.FileviewContext)
+ }
+
+ if !fInfo.isLFSFile && ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ ctx.Data["CanEditReadmeFile"] = true
+ }
+}
+
+func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
+ // Show latest commit info of repository in table header,
+	// Show the latest commit info of the repository in the table header,
+	// or of the directory if not in the root directory.
+ if latestCommit != nil {
+ verification := asymkey_model.ParseCommitWithSignature(ctx, latestCommit)
+
+ if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+ }, nil); err != nil {
+ ctx.ServerError("CalculateTrustStatus", err)
+ return false
+ }
+ ctx.Data["LatestCommitVerification"] = verification
+ ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(ctx, latestCommit)
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptionsAll)
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ }
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, statuses)
+ }
+
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(statuses)
+ ctx.Data["LatestCommitStatuses"] = statuses
+ }
+
+ return true
+}
+
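+// renderFile prepares the template data for viewing a single file: latest commit info, markup rendering or syntax highlighting, LFS locks and edit/delete permissions.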
+func renderFile(ctx *context.Context, entry *git.TreeEntry) {
+ ctx.Data["IsViewFile"] = true
+ ctx.Data["HideRepoInfo"] = true
+ blob := entry.Blob()
+ buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, blob)
+ if err != nil {
+ ctx.ServerError("getFileReader", err)
+ return
+ }
+ defer dataRc.Close()
+
+ ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefName)
+ ctx.Data["FileIsSymlink"] = entry.IsLink()
+ ctx.Data["FileName"] = blob.Name()
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+
+ if entry.IsLink() {
+ _, link, err := entry.FollowLinks()
+		// Errors are tolerated here, because a broken symlink
+		// should not block rendering the file.
+ if err == nil {
+ ctx.Data["SymlinkURL"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(link)
+ }
+ }
+
+ commit, err := ctx.Repo.Commit.GetCommitByPath(ctx.Repo.TreePath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+
+ if !loadLatestCommitData(ctx, commit) {
+ return
+ }
+
+ if ctx.Repo.TreePath == ".editorconfig" {
+ _, editorconfigWarning, editorconfigErr := ctx.Repo.GetEditorconfig(ctx.Repo.Commit)
+ if editorconfigWarning != nil {
+ ctx.Data["FileWarning"] = strings.TrimSpace(editorconfigWarning.Error())
+ }
+ if editorconfigErr != nil {
+ ctx.Data["FileError"] = strings.TrimSpace(editorconfigErr.Error())
+ }
+ } else if issue_service.IsTemplateConfig(ctx.Repo.TreePath) {
+ _, issueConfigErr := issue_service.GetTemplateConfig(ctx.Repo.GitRepo, ctx.Repo.TreePath, ctx.Repo.Commit)
+ if issueConfigErr != nil {
+ ctx.Data["FileError"] = strings.TrimSpace(issueConfigErr.Error())
+ }
+ } else if actions.IsWorkflow(ctx.Repo.TreePath) {
+ content, err := actions.GetContentFromEntry(entry)
+ if err != nil {
+ log.Error("actions.GetContentFromEntry: %v", err)
+ }
+ _, workFlowErr := model.ReadWorkflow(bytes.NewReader(content))
+ if workFlowErr != nil {
+ ctx.Data["FileError"] = ctx.Locale.Tr("actions.runs.invalid_workflow_helper", workFlowErr.Error())
+ }
+ } else if slices.Contains([]string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}, ctx.Repo.TreePath) {
+ if data, err := blob.GetBlobContent(setting.UI.MaxDisplayFileSize); err == nil {
+ _, warnings := issue_model.GetCodeOwnersFromContent(ctx, data)
+ if len(warnings) > 0 {
+ ctx.Data["FileWarning"] = strings.Join(warnings, "\n")
+ }
+ }
+ }
+
+ isDisplayingSource := ctx.FormString("display") == "source"
+ isDisplayingRendered := !isDisplayingSource
+
+ if fInfo.isLFSFile {
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+
+ isRepresentableAsText := fInfo.st.IsRepresentableAsText()
+ if !isRepresentableAsText {
+ // If we can't show plain text, always try to render.
+ isDisplayingSource = false
+ isDisplayingRendered = true
+ }
+ ctx.Data["IsLFSFile"] = fInfo.isLFSFile
+ ctx.Data["FileSize"] = fInfo.fileSize
+ ctx.Data["IsTextFile"] = fInfo.isTextFile
+ ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
+ ctx.Data["IsDisplayingSource"] = isDisplayingSource
+ ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
+ ctx.Data["IsExecutable"] = entry.IsExecutable()
+
+ isTextSource := fInfo.isTextFile || isDisplayingSource
+ ctx.Data["IsTextSource"] = isTextSource
+ if isTextSource {
+ ctx.Data["CanCopyContent"] = true
+ }
+
+ // Check LFS Lock
+ lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
+ ctx.Data["LFSLock"] = lfsLock
+ if err != nil {
+ ctx.ServerError("GetTreePathLock", err)
+ return
+ }
+ if lfsLock != nil {
+ u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
+ if err != nil {
+ ctx.ServerError("GetTreePathLock", err)
+ return
+ }
+ ctx.Data["LFSLockOwner"] = u.Name
+ ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
+ ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
+ }
+
+ // Assume file is not editable first.
+ if fInfo.isLFSFile {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
+ } else if !isRepresentableAsText {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
+ }
+
+ switch {
+ case isRepresentableAsText:
+ if fInfo.st.IsSvgImage() {
+ ctx.Data["IsImageFile"] = true
+ ctx.Data["CanCopyContent"] = true
+ ctx.Data["HasSourceRenderedToggle"] = true
+ }
+
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+ shouldRenderSource := ctx.FormString("display") == "source"
+ readmeExist := util.IsReadmeFileName(blob.Name())
+ ctx.Data["ReadmeExist"] = readmeExist
+
+ markupType := markup.Type(blob.Name())
+		// A markup type detected by a custom markup renderer is kept (not reset later on)
+		// so that it is passed down to the render context.
+ detected := false
+ if markupType == "" {
+ detected = true
+ markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
+ }
+ if markupType != "" {
+ ctx.Data["HasSourceRenderedToggle"] = true
+ }
+
+ if markupType != "" && !shouldRenderSource {
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+ if !detected {
+ markupType = ""
+ }
+ metas := ctx.Repo.Repository.ComposeDocumentMetas(ctx)
+ metas["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ Type: markupType,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: metas,
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ ctx.ServerError("Render", err)
+ return
+ }
+			// to prevent iframes from loading third-party URLs
+ ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
+ } else {
+ buf, _ := io.ReadAll(rd)
+
+ // The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
+ // empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
+ // Forgejo uses the definition (like most modern editors):
+ // empty: 0 lines; "a": 1 line; "a\n": 1 line; "a\nb": 2 lines;
+			// When rendering, the last empty line is not rendered in the UI and isn't counted towards the number of lines.
+			// To tell users that the file does not contain a trailing EOL, text with a tooltip is displayed in the file header.
+			// A trailing EOL is only considered if the file has content.
+			// This NumLines is only used for display in the UI: "xxx lines"
+ if len(buf) == 0 {
+ ctx.Data["NumLines"] = 0
+ } else {
+ hasNoTrailingEOL := !bytes.HasSuffix(buf, []byte{'\n'})
+ ctx.Data["HasNoTrailingEOL"] = hasNoTrailingEOL
+
+ numLines := bytes.Count(buf, []byte{'\n'})
+ if hasNoTrailingEOL {
+ numLines++
+ }
+ ctx.Data["NumLines"] = numLines
+ }
+ ctx.Data["NumLinesSet"] = true
+
+ language, err := files_service.TryGetContentLanguage(ctx.Repo.GitRepo, ctx.Repo.CommitID, ctx.Repo.TreePath)
+ if err != nil {
+ log.Error("Unable to get file language for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
+ }
+
+ fileContent, lexerName, err := highlight.File(blob.Name(), language, buf)
+ ctx.Data["LexerName"] = lexerName
+ if err != nil {
+ log.Error("highlight.File failed, fallback to plain text: %v", err)
+ fileContent = highlight.PlainText(buf)
+ }
+ status := &charset.EscapeStatus{}
+ statuses := make([]*charset.EscapeStatus, len(fileContent))
+ for i, line := range fileContent {
+ statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale, charset.FileviewContext)
+ status = status.Or(statuses[i])
+ }
+ ctx.Data["EscapeStatus"] = status
+ ctx.Data["FileContent"] = fileContent
+ ctx.Data["LineEscapeStatus"] = statuses
+ }
+ if !fInfo.isLFSFile {
+ if ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
+ ctx.Data["CanEditFile"] = false
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
+ } else {
+ ctx.Data["CanEditFile"] = true
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file")
+ }
+ } else if !ctx.Repo.IsViewBranch {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
+ } else if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
+ }
+ }
+
+ case fInfo.st.IsPDF():
+ ctx.Data["IsPDFFile"] = true
+ case fInfo.st.IsVideo():
+ ctx.Data["IsVideoFile"] = true
+ case fInfo.st.IsAudio():
+ ctx.Data["IsAudioFile"] = true
+ case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
+ ctx.Data["IsImageFile"] = true
+ ctx.Data["CanCopyContent"] = true
+ default:
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ if markupType := markup.Type(blob.Name()); markupType != "" {
+ rd := io.MultiReader(bytes.NewReader(buf), dataRc)
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ ctx.ServerError("Render", err)
+ return
+ }
+ }
+ }
+
+ if ctx.Repo.GitRepo != nil {
+ attrs, err := ctx.Repo.GitRepo.GitAttributes(ctx.Repo.CommitID, ctx.Repo.TreePath, "linguist-vendored", "linguist-generated")
+ if err != nil {
+ log.Error("GitAttributes(%s, %s) failed: %v", ctx.Repo.CommitID, ctx.Repo.TreePath, err)
+ } else {
+ ctx.Data["IsVendored"] = attrs["linguist-vendored"].Bool().Value()
+ ctx.Data["IsGenerated"] = attrs["linguist-generated"].Bool().Value()
+ }
+ }
+
+ if fInfo.st.IsImage() && !fInfo.st.IsSvgImage() {
+ img, _, err := image.DecodeConfig(bytes.NewReader(buf))
+ if err == nil {
+			// There are image formats Go can't decode.
+			// Instead of throwing an error in that case, we only show the size when decoding succeeds.
+ ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
+ }
+ }
+
+ if ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
+ ctx.Data["CanDeleteFile"] = false
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
+ } else {
+ ctx.Data["CanDeleteFile"] = true
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.delete_this_file")
+ }
+ } else if !ctx.Repo.IsViewBranch {
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
+ } else if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
+ }
+}
+
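+// markupRender renders the input through the markup pipeline while escaping control characters from the produced HTML.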
+func markupRender(ctx *context.Context, renderCtx *markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) {
+ markupRd, markupWr := io.Pipe()
+ defer markupWr.Close()
+ done := make(chan struct{})
+ go func() {
+ sb := &strings.Builder{}
+		// We allow NBSP here because this is rendered content
+ escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.FileviewContext, charset.RuneNBSP)
+ output = template.HTML(sb.String())
+ close(done)
+ }()
+ err = markup.Render(renderCtx, input, markupWr)
+ _ = markupWr.CloseWithError(err)
+ <-done
+ return escaped, output, err
+}
+
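+// checkHomeCodeViewable shows the migration progress page for repositories that are still being created and redirects to the first enabled unit when the code unit is not available.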
+func checkHomeCodeViewable(ctx *context.Context) {
+ if len(ctx.Repo.Units) > 0 {
+ if ctx.Repo.Repository.IsBeingCreated() {
+ task, err := admin_model.GetMigratingTask(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if admin_model.IsErrTaskDoesNotExist(err) {
+ ctx.Data["Repo"] = ctx.Repo
+ ctx.Data["CloneAddr"] = ""
+ ctx.Data["Failed"] = true
+ ctx.HTML(http.StatusOK, tplMigrating)
+ return
+ }
+ ctx.ServerError("models.GetMigratingTask", err)
+ return
+ }
+ cfg, err := task.MigrateConfig()
+ if err != nil {
+ ctx.ServerError("task.MigrateConfig", err)
+ return
+ }
+
+ ctx.Data["Repo"] = ctx.Repo
+ ctx.Data["MigrateTask"] = task
+ ctx.Data["CloneAddr"], _ = util.SanitizeURL(cfg.CloneAddr)
+ ctx.Data["Failed"] = task.Status == structs.TaskStatusFailed
+ ctx.HTML(http.StatusOK, tplMigrating)
+ return
+ }
+
+ if ctx.IsSigned {
+ // Set repo notification-status read if unread
+ if err := activities_model.SetRepoReadBy(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return
+ }
+ }
+
+ var firstUnit *unit_model.Unit
+ for _, repoUnit := range ctx.Repo.Units {
+ if repoUnit.Type == unit_model.TypeCode {
+ return
+ }
+
+ unit, ok := unit_model.Units[repoUnit.Type]
+ if ok && (firstUnit == nil || !firstUnit.IsLessThan(unit)) && repoUnit.Type.CanBeDefault() {
+ firstUnit = &unit
+ }
+ }
+
+ if firstUnit != nil {
+ ctx.Redirect(ctx.Repo.Repository.Link() + firstUnit.URI)
+ return
+ }
+ }
+
+ ctx.NotFound("Home", errors.New(ctx.Locale.TrString("units.error.no_unit_allowed_repo")))
+}
+
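+// checkCitationFile looks for a CITATION.cff or CITATION.bib file in the current tree and exposes its content to the page.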
+func checkCitationFile(ctx *context.Context, entry *git.TreeEntry) {
+ if entry.Name() != "" {
+ return
+ }
+ tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.SubTree", err)
+ return
+ }
+ allEntries, err := tree.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return
+ }
+ for _, entry := range allEntries {
+ if entry.Name() == "CITATION.cff" || entry.Name() == "CITATION.bib" {
+ // Read Citation file contents
+ if content, err := entry.Blob().GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("checkCitationFile: GetBlobContent: %v", err)
+ } else {
+ ctx.Data["CitationExist"] = true
+ ctx.Data["CitationFile"] = entry.Name()
+ ctx.PageData["citationFileContent"] = content
+ break
+ }
+ }
+ }
+}
+
+// Home render repository home page
+func Home(ctx *context.Context) {
+ if setting.Other.EnableFeed {
+ isFeed, _, showFeedType := feed.GetFeedType(ctx.Params(":reponame"), ctx.Req)
+ if isFeed {
+ if ctx.Link == fmt.Sprintf("%s.%s", ctx.Repo.RepoLink, showFeedType) {
+ feed.ShowRepoFeed(ctx, ctx.Repo.Repository, showFeedType)
+ return
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.NotFound("MustBeNotEmpty", nil)
+ return
+ }
+
+ if ctx.Repo.TreePath == "" {
+ feed.ShowBranchFeed(ctx, ctx.Repo.Repository, showFeedType)
+ } else {
+ feed.ShowFileFeed(ctx, ctx.Repo.Repository, showFeedType)
+ }
+ return
+ }
+ }
+
+ checkHomeCodeViewable(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ renderHomeCode(ctx)
+}
+
+// LastCommit returns lastCommit data for the provided branch/tag/commit and directory (in url) and filenames in body
+func LastCommit(ctx *context.Context) {
+ checkHomeCodeViewable(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ renderDirectoryFiles(ctx, 0)
+ if ctx.Written() {
+ return
+ }
+
+ var treeNames []string
+ paths := make([]string, 0, 5)
+ if len(ctx.Repo.TreePath) > 0 {
+ treeNames = strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ ctx.Data["HasParentPath"] = true
+ if len(paths)-2 >= 0 {
+ ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
+ }
+ }
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["BranchLink"] = branchLink
+
+ ctx.HTML(http.StatusOK, tplRepoViewList)
+}
+
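+// renderDirectoryFiles lists the entries of the current tree path, loads their latest commit info (bounded by the given timeout) and fills the template data.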
+func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entries {
+ tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.SubTree", err)
+ return nil
+ }
+
+ ctx.Data["LastCommitLoaderURL"] = ctx.Repo.RepoLink + "/lastcommit/" + url.PathEscape(ctx.Repo.CommitID) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+
+	// Get the entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return nil
+ }
+
+ if !entry.IsDir() {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return nil
+ }
+
+ allEntries, err := tree.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return nil
+ }
+ allEntries.CustomSort(base.NaturalSortLess)
+
+ commitInfoCtx := gocontext.Context(ctx)
+ if timeout > 0 {
+ var cancel gocontext.CancelFunc
+ commitInfoCtx, cancel = gocontext.WithTimeout(ctx, timeout)
+ defer cancel()
+ }
+
+ files, latestCommit, err := allEntries.GetCommitsInfo(commitInfoCtx, ctx.Repo.Commit, ctx.Repo.TreePath)
+ if err != nil {
+ ctx.ServerError("GetCommitsInfo", err)
+ return nil
+ }
+ ctx.Data["Files"] = files
+ for _, f := range files {
+ if f.Commit == nil {
+ ctx.Data["HasFilesWithoutLatestCommit"] = true
+ break
+ }
+ }
+
+ if !loadLatestCommitData(ctx, latestCommit) {
+ return nil
+ }
+
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treeLink := branchLink
+
+ if len(ctx.Repo.TreePath) > 0 {
+ treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+
+ ctx.Data["TreeLink"] = treeLink
+ ctx.Data["SSHDomain"] = setting.SSH.Domain
+
+ return allEntries
+}
+
+func renderLanguageStats(ctx *context.Context) {
+ langs, err := repo_model.GetTopLanguageStats(ctx, ctx.Repo.Repository, 5)
+ if err != nil {
+ ctx.ServerError("Repo.GetTopLanguageStats", err)
+ return
+ }
+
+ ctx.Data["LanguageStats"] = langs
+}
+
+func renderRepoTopics(ctx *context.Context) {
+ topics, _, err := repo_model.FindTopics(ctx, &repo_model.FindTopicOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("models.FindTopics", err)
+ return
+ }
+ ctx.Data["Topics"] = topics
+}
+
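+// prepareOpenWithEditorApps builds the list of "Open with" editor applications (display name,
+// open URL and icon) and stores it in ctx.Data for the template.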
+func prepareOpenWithEditorApps(ctx *context.Context) {
+ var tmplApps []map[string]any
+ apps := setting.Config().Repository.OpenWithEditorApps.Value(ctx)
+ if len(apps) == 0 {
+ apps = setting.DefaultOpenWithEditorApps()
+ }
+ for _, app := range apps {
+ schema, _, _ := strings.Cut(app.OpenURL, ":")
+ var iconHTML template.HTML
+ if schema == "vscode" || schema == "vscodium" || schema == "jetbrains" {
+ iconHTML = svg.RenderHTML(fmt.Sprintf("gitea-open-with-%s", schema), 16, "tw-mr-2")
+ } else {
+			iconHTML = svg.RenderHTML("gitea-git", 16, "tw-mr-2") // TODO: this could support user-customized icons in the future
+ }
+ tmplApps = append(tmplApps, map[string]any{
+ "DisplayName": app.DisplayName,
+ "OpenURL": app.OpenURL,
+ "IconHTML": iconHTML,
+ })
+ }
+ ctx.Data["OpenWithEditorApps"] = tmplApps
+}
+
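+// renderHomeCode renders the code view of the repository home page. It handles empty or broken
+// repositories, loads topics, citation and language data, renders the current tree entry and
+// collects recently pushed branches.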
+func renderHomeCode(ctx *context.Context) {
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["RepositoryUploadEnabled"] = setting.Repository.Upload.Enabled
+ prepareOpenWithEditorApps(ctx)
+
+ if ctx.Repo.Commit == nil || ctx.Repo.Repository.IsEmpty || ctx.Repo.Repository.IsBroken() {
+ showEmpty := true
+ var err error
+ if ctx.Repo.GitRepo != nil {
+ showEmpty, err = ctx.Repo.GitRepo.IsEmpty()
+ if err != nil {
+ log.Error("GitRepo.IsEmpty: %v", err)
+ ctx.Repo.Repository.Status = repo_model.RepositoryBroken
+ showEmpty = true
+ ctx.Flash.Error(ctx.Tr("error.occurred"), true)
+ }
+ }
+ if showEmpty {
+ ctx.HTML(http.StatusOK, tplRepoEMPTY)
+ return
+ }
+
+	// The repo is not really empty, so we should update the model in the database.
+	// Such a problem may be caused by:
+	// 1) an error occurred during pushing/receiving; 2) the user replaced an empty git repo manually.
+	// Moreover, the IsEmpty flag is deeply broken and should be removed, with the UI changed to cope with empty repos.
+	// It is possible for a repository to be non-empty according to that flag but still return a 500,
+	// because there are no branches (only tags), or the default branch does not exist because it was removed by a zero-push.
+ ctx.Repo.Repository.IsEmpty = false
+ if err = repo_model.UpdateRepositoryCols(ctx, ctx.Repo.Repository, "is_empty"); err != nil {
+ ctx.ServerError("UpdateRepositoryCols", err)
+ return
+ }
+ if err = repo_module.UpdateRepoSize(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("UpdateRepoSize", err)
+ return
+ }
+
+ // the repo's IsEmpty has been updated, redirect to this page to make sure middlewares can get the correct values
+ link := ctx.Link
+ if ctx.Req.URL.RawQuery != "" {
+ link += "?" + ctx.Req.URL.RawQuery
+ }
+ ctx.Redirect(link)
+ return
+ }
+
+ title := ctx.Repo.Repository.Owner.Name + "/" + ctx.Repo.Repository.Name
+ if len(ctx.Repo.Repository.Description) > 0 {
+ title += ": " + ctx.Repo.Repository.Description
+ }
+ ctx.Data["Title"] = title
+
+ // Get Topics of this repo
+ renderRepoTopics(ctx)
+ if ctx.Written() {
+ return
+ }
+
+	// Get the entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+ checkOutdatedBranch(ctx)
+
+ checkCitationFile(ctx, entry)
+ if ctx.Written() {
+ return
+ }
+
+ renderLanguageStats(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if entry.IsDir() {
+ renderDirectory(ctx)
+ } else {
+ renderFile(ctx, entry)
+ }
+ if ctx.Written() {
+ return
+ }
+
+ if ctx.Doer != nil {
+ if err := ctx.Repo.Repository.GetBaseRepo(ctx); err != nil {
+ ctx.ServerError("GetBaseRepo", err)
+ return
+ }
+
+ // If the repo is a mirror, don't display recently pushed branches.
+ if ctx.Repo.Repository.IsMirror {
+ goto PostRecentBranchCheck
+ }
+
+ // If pull requests aren't enabled for either the current repo, or its
+ // base, don't display recently pushed branches.
+ if !(ctx.Repo.Repository.AllowsPulls(ctx) ||
+ (ctx.Repo.Repository.BaseRepo != nil && ctx.Repo.Repository.BaseRepo.AllowsPulls(ctx))) {
+ goto PostRecentBranchCheck
+ }
+
+ // Find recently pushed new branches to *this* repo.
+ branches, err := git_model.FindRecentlyPushedNewBranches(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID, ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("FindRecentlyPushedBranches", err)
+ return
+ }
+
+ // If this is not a fork, check if the signed in user has a fork, and
+ // check branches there.
+ if !ctx.Repo.Repository.IsFork {
+ repo := repo_model.GetForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+ if repo != nil {
+ baseBranches, err := git_model.FindRecentlyPushedNewBranches(ctx, repo.ID, ctx.Doer.ID, repo.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("FindRecentlyPushedBranches", err)
+ return
+ }
+ branches = append(branches, baseBranches...)
+ }
+ }
+
+ // Filter out branches that have no relation to the default branch of
+ // the repository.
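+		// A branch is considered related when the head of its repository's default branch
+		// is an ancestor of the branch's head commit.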
+ var filteredBranches []*git_model.Branch
+ for _, branch := range branches {
+ repo, err := branch.GetRepo(ctx)
+ if err != nil {
+ continue
+ }
+ gitRepo, err := git.OpenRepository(ctx, repo.RepoPath())
+ if err != nil {
+ continue
+ }
+ defer gitRepo.Close()
+ head, err := gitRepo.GetCommit(branch.CommitID)
+ if err != nil {
+ continue
+ }
+ defaultBranch, err := gitrepo.GetDefaultBranch(ctx, repo)
+ if err != nil {
+ continue
+ }
+ defaultBranchHead, err := gitRepo.GetCommit(defaultBranch)
+ if err != nil {
+ continue
+ }
+
+ hasMergeBase, err := head.HasPreviousCommit(defaultBranchHead.ID)
+ if err != nil {
+ continue
+ }
+
+ if hasMergeBase {
+ filteredBranches = append(filteredBranches, branch)
+ }
+ }
+
+ ctx.Data["RecentlyPushedNewBranches"] = filteredBranches
+ }
+
+PostRecentBranchCheck:
+ var treeNames []string
+ paths := make([]string, 0, 5)
+ if len(ctx.Repo.TreePath) > 0 {
+ treeNames = strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ ctx.Data["HasParentPath"] = true
+ if len(paths)-2 >= 0 {
+ ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
+ }
+ }
+
+ ctx.Data["Paths"] = paths
+
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treeLink := branchLink
+ if len(ctx.Repo.TreePath) > 0 {
+ treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+ ctx.Data["TreeLink"] = treeLink
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["BranchLink"] = branchLink
+ ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
+ ctx.HTML(http.StatusOK, tplRepoHome)
+}
+
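+// checkOutdatedBranch warns repository admins and owners when the branch head recorded in the
+// database no longer matches the head in the git repository, which usually indicates broken git hooks.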
+func checkOutdatedBranch(ctx *context.Context) {
+ if !(ctx.Repo.IsAdmin() || ctx.Repo.IsOwner()) {
+ return
+ }
+
+ // get the head commit of the branch since ctx.Repo.CommitID is not always the head commit of `ctx.Repo.BranchName`
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
+ if err != nil {
+ log.Error("GetBranchCommitID: %v", err)
+ // Don't return an error page, as it can be rechecked the next time the user opens the page.
+ return
+ }
+
+ dbBranch, err := git_model.GetBranch(ctx, ctx.Repo.Repository.ID, ctx.Repo.BranchName)
+ if err != nil {
+ log.Error("GetBranch: %v", err)
+ // Don't return an error page, as it can be rechecked the next time the user opens the page.
+ return
+ }
+
+ if dbBranch.CommitID != commit.ID.String() {
+ ctx.Flash.Warning(ctx.Tr("repo.error.broken_git_hook", "https://docs.gitea.com/help/faq#push-hook--webhook--actions-arent-running"), true)
+ }
+}
+
+// RenderUserCards renders a page that shows users according to the given template
+func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+ pager := context.NewPagination(total, setting.MaxUserCardsPerPage, page, 5)
+ ctx.Data["Page"] = pager
+
+ items, err := getter(db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.MaxUserCardsPerPage,
+ })
+ if err != nil {
+ ctx.ServerError("getter", err)
+ return
+ }
+ ctx.Data["Cards"] = items
+
+ ctx.HTML(http.StatusOK, tpl)
+}
+
+// Watchers renders the repository's watchers
+func Watchers(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.watchers")
+ ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
+ ctx.Data["PageIsWatchers"] = true
+
+ RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, func(opts db.ListOptions) ([]*user_model.User, error) {
+ return repo_model.GetRepoWatchers(ctx, ctx.Repo.Repository.ID, opts)
+ }, tplWatchers)
+}
+
+// Stars renders the users who starred the repository
+func Stars(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.stargazers")
+ ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
+ ctx.Data["PageIsStargazers"] = true
+ RenderUserCards(ctx, ctx.Repo.Repository.NumStars, func(opts db.ListOptions) ([]*user_model.User, error) {
+ return repo_model.GetStargazers(ctx, ctx.Repo.Repository, opts)
+ }, tplWatchers)
+}
+
+// Forks renders the repository's forks
+func Forks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.forks")
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ pager := context.NewPagination(ctx.Repo.Repository.NumForks, setting.MaxForksPerPage, page, 5)
+ ctx.Data["Page"] = pager
+
+ forks, err := repo_model.GetForks(ctx, ctx.Repo.Repository, db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.MaxForksPerPage,
+ })
+ if err != nil {
+ ctx.ServerError("GetForks", err)
+ return
+ }
+
+ for _, fork := range forks {
+ if err = fork.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return
+ }
+ }
+
+ ctx.Data["Forks"] = forks
+
+ ctx.HTML(http.StatusOK, tplForks)
+}
diff --git a/routers/web/repo/view_test.go b/routers/web/repo/view_test.go
new file mode 100644
index 0000000..73ba118
--- /dev/null
+++ b/routers/web/repo/view_test.go
@@ -0,0 +1,62 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "reflect"
+ "testing"
+)
+
+func Test_localizedExtensions(t *testing.T) {
+ tests := []struct {
+ name string
+ ext string
+ languageCode string
+ wantLocalizedExts []string
+ }{
+ {
+ name: "empty language",
+ ext: ".md",
+ wantLocalizedExts: []string{".md"},
+ },
+ {
+ name: "No region - lowercase",
+ languageCode: "en",
+ ext: ".csv",
+ wantLocalizedExts: []string{".en.csv", ".csv"},
+ },
+ {
+ name: "No region - uppercase",
+ languageCode: "FR",
+ ext: ".txt",
+ wantLocalizedExts: []string{".fr.txt", ".txt"},
+ },
+ {
+ name: "With region - lowercase",
+ languageCode: "en-us",
+ ext: ".md",
+ wantLocalizedExts: []string{".en-us.md", ".en_us.md", ".en.md", "_en.md", ".md"},
+ },
+ {
+ name: "With region - uppercase",
+ languageCode: "en-CA",
+ ext: ".MD",
+ wantLocalizedExts: []string{".en-ca.MD", ".en_ca.MD", ".en.MD", "_en.MD", ".MD"},
+ },
+ {
+ name: "With region - all uppercase",
+ languageCode: "ZH-TW",
+ ext: ".md",
+ wantLocalizedExts: []string{".zh-tw.md", ".zh_tw.md", ".zh.md", "_zh.md", ".md"},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if gotLocalizedExts := localizedExtensions(tt.ext, tt.languageCode); !reflect.DeepEqual(gotLocalizedExts, tt.wantLocalizedExts) {
+ t.Errorf("localizedExtensions() = %v, want %v", gotLocalizedExts, tt.wantLocalizedExts)
+ }
+ })
+ }
+}
diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go
new file mode 100644
index 0000000..1fd0800
--- /dev/null
+++ b/routers/web/repo/wiki.go
@@ -0,0 +1,816 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ notify_service "code.gitea.io/gitea/services/notify"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+)
+
+const (
+ tplWikiStart base.TplName = "repo/wiki/start"
+ tplWikiView base.TplName = "repo/wiki/view"
+ tplWikiRevision base.TplName = "repo/wiki/revision"
+ tplWikiNew base.TplName = "repo/wiki/new"
+ tplWikiPages base.TplName = "repo/wiki/pages"
+ tplWikiSearch base.TplName = "repo/wiki/search"
+)
+
+// MustEnableWiki checks that the wiki is enabled and readable; if the repository uses an external wiki, it redirects to it
+func MustEnableWiki(ctx *context.Context) {
+ if !ctx.Repo.CanRead(unit.TypeWiki) &&
+ !ctx.Repo.CanRead(unit.TypeExternalWiki) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User %-v cannot read %-v or %-v of repo %-v\n"+
+ "User in repo has Permissions: %-+v",
+ ctx.Doer,
+ unit.TypeWiki,
+ unit.TypeExternalWiki,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ }
+ ctx.NotFound("MustEnableWiki", nil)
+ return
+ }
+
+ unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalWiki)
+ if err == nil {
+ ctx.Redirect(unit.ExternalWikiConfig().ExternalWikiURL)
+ return
+ }
+}
+
+// PageMeta wiki page meta information
+type PageMeta struct {
+ Name string
+ SubURL string
+ GitEntryName string
+ UpdatedUnix timeutil.TimeStamp
+}
+
+// findEntryForFile finds the tree entry for a target filepath.
+func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) {
+ entry, err := commit.GetTreeEntryByPath(target)
+ if err != nil && !git.IsErrNotExist(err) {
+ return nil, err
+ }
+ if entry != nil {
+ return entry, nil
+ }
+
+	// Then try the unescaped name, the shortest alternative
+ var unescapedTarget string
+ if unescapedTarget, err = url.QueryUnescape(target); err != nil {
+ return nil, err
+ }
+ return commit.GetTreeEntryByPath(unescapedTarget)
+}
+
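+// findWikiRepoCommit opens the wiki repository and returns it together with the head commit of the
+// wiki branch. The caller is responsible for closing the returned repository, which may be non-nil
+// even when an error is returned.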
+func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) {
+ wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil, nil, err
+ }
+
+ commit, err := wikiRepo.GetBranchCommit(ctx.Repo.Repository.GetWikiBranchName())
+ if err != nil {
+ return wikiRepo, nil, err
+ }
+ return wikiRepo, commit, nil
+}
+
+// wikiContentsByEntry returns the contents of the wiki page referenced by the
+// given tree entry. Writes to ctx if an error occurs.
+func wikiContentsByEntry(ctx *context.Context, entry *git.TreeEntry) []byte {
+ reader, err := entry.Blob().DataAsync()
+ if err != nil {
+ ctx.ServerError("Blob.Data", err)
+ return nil
+ }
+ defer reader.Close()
+ content, err := io.ReadAll(reader)
+ if err != nil {
+ ctx.ServerError("ReadAll", err)
+ return nil
+ }
+ return content
+}
+
+// wikiContentsByName returns the contents, tree entry and git filename of a wiki page, along with
+// a boolean that is true when the page does not exist. Writes to ctx if an error occurs.
+func wikiContentsByName(ctx *context.Context, commit *git.Commit, wikiName wiki_service.WebPath) ([]byte, *git.TreeEntry, string, bool) {
+ gitFilename := wiki_service.WebPathToGitPath(wikiName)
+ entry, err := findEntryForFile(commit, gitFilename)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findEntryForFile", err)
+ return nil, nil, "", false
+ } else if entry == nil {
+ return nil, nil, "", true
+ }
+ return wikiContentsByEntry(ctx, entry), entry, gitFilename, false
+}
+
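+// renderViewPage loads the wiki page list, renders the requested page (plus sidebar and footer) to
+// HTML and returns the wiki repository and the page's tree entry.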
+func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return nil, nil
+ }
+
+ // Get page list.
+ entries, err := commit.ListEntries()
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("ListEntries", err)
+ return nil, nil
+ }
+ pages := make([]PageMeta, 0, len(entries))
+ for _, entry := range entries {
+ if !entry.IsRegular() {
+ continue
+ }
+ wikiName, err := wiki_service.GitPathToWebPath(entry.Name())
+ if err != nil {
+ if repo_model.IsErrWikiInvalidFileName(err) {
+ continue
+ }
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("WikiFilenameToName", err)
+ return nil, nil
+ } else if wikiName == "_Sidebar" || wikiName == "_Footer" {
+ continue
+ }
+ _, displayName := wiki_service.WebPathToUserTitle(wikiName)
+ pages = append(pages, PageMeta{
+ Name: displayName,
+ SubURL: wiki_service.WebPathToURLPath(wikiName),
+ GitEntryName: entry.Name(),
+ })
+ }
+ ctx.Data["Pages"] = pages
+
+ // get requested page name
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+ isSideBar := pageName == "_Sidebar"
+ isFooter := pageName == "_Footer"
+
+	// look up the filename in the wiki - get file content, git tree entry and real filename
+ data, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+
+ var sidebarContent []byte
+ if !isSideBar {
+ sidebarContent, _, _, _ = wikiContentsByName(ctx, commit, "_Sidebar")
+ if ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+ } else {
+ sidebarContent = data
+ }
+
+ var footerContent []byte
+ if !isFooter {
+ footerContent, _, _, _ = wikiContentsByName(ctx, commit, "_Footer")
+ if ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+ } else {
+ footerContent = data
+ }
+
+ rctx := &markup.RenderContext{
+ Ctx: ctx,
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ IsWiki: true,
+ }
+ buf := &strings.Builder{}
+
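+	// renderFn renders markdown through a pipe: the writer side receives the rendered markup while
+	// the reader side escapes control characters (NBSP is allowed) and collects the result into buf.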
+ renderFn := func(data []byte) (escaped *charset.EscapeStatus, output string, err error) {
+ markupRd, markupWr := io.Pipe()
+ defer markupWr.Close()
+ done := make(chan struct{})
+ go func() {
+			// We allow NBSP here since this is rendered
+ escaped, _ = charset.EscapeControlReader(markupRd, buf, ctx.Locale, charset.WikiContext, charset.RuneNBSP)
+ output = buf.String()
+ buf.Reset()
+ close(done)
+ }()
+
+ err = markdown.Render(rctx, bytes.NewReader(data), markupWr)
+ _ = markupWr.CloseWithError(err)
+ <-done
+ return escaped, output, err
+ }
+
+ ctx.Data["EscapeStatus"], ctx.Data["content"], err = renderFn(data)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+
+ if rctx.SidebarTocNode != nil {
+ sb := &strings.Builder{}
+ err = markdown.SpecializedMarkdown().Renderer().Render(sb, nil, rctx.SidebarTocNode)
+ if err != nil {
+ log.Error("Failed to render wiki sidebar TOC: %v", err)
+ } else {
+ ctx.Data["sidebarTocContent"] = sb.String()
+ }
+ }
+
+ if !isSideBar {
+ buf.Reset()
+ ctx.Data["sidebarEscapeStatus"], ctx.Data["sidebarContent"], err = renderFn(sidebarContent)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+ ctx.Data["sidebarPresent"] = sidebarContent != nil
+ } else {
+ ctx.Data["sidebarPresent"] = false
+ }
+
+ if !isFooter {
+ buf.Reset()
+ ctx.Data["footerEscapeStatus"], ctx.Data["footerContent"], err = renderFn(footerContent)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+ ctx.Data["footerPresent"] = footerContent != nil
+ } else {
+ ctx.Data["footerPresent"] = false
+ }
+
+ // get commit count - wiki revisions
+ commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.GetWikiBranchName(), pageFilename)
+ ctx.Data["CommitCount"] = commitsCount
+
+ return wikiRepo, entry
+}
+
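+// renderRevisionPage loads the requested wiki page and its paged commit history for the revision view.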
+func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return nil, nil
+ }
+
+ // get requested pagename
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+
+	// look up the filename in the wiki - get file content, git tree entry and real filename
+ data, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+
+ ctx.Data["content"] = string(data)
+ ctx.Data["sidebarPresent"] = false
+ ctx.Data["sidebarContent"] = ""
+ ctx.Data["footerPresent"] = false
+ ctx.Data["footerContent"] = ""
+
+ // get commit count - wiki revisions
+ commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.GetWikiBranchName(), pageFilename)
+ ctx.Data["CommitCount"] = commitsCount
+
+ // get page
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+	// get the paged commit history
+ commitsHistory, err := wikiRepo.CommitsByFileAndRange(
+ git.CommitsByFileAndRangeOptions{
+ Revision: ctx.Repo.Repository.GetWikiBranchName(),
+ File: pageFilename,
+ Page: page,
+ })
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("CommitsByFileAndRange", err)
+ return nil, nil
+ }
+ ctx.Data["Commits"] = git_model.ConvertFromGitCommit(ctx, commitsHistory, ctx.Repo.Repository)
+
+ pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParamString("action", "_revision")
+ ctx.Data["Page"] = pager
+
+ return wikiRepo, entry
+}
+
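+// renderEditPage loads the raw content of the requested wiki page into the template data for the edit form.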
+func renderEditPage(ctx *context.Context) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return
+ }
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ // get requested pagename
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+	// look up the filename in the wiki - get file content, git tree entry and real filename
+ data, entry, _, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ return
+ }
+
+ ctx.Data["content"] = string(data)
+ ctx.Data["sidebarPresent"] = false
+ ctx.Data["sidebarContent"] = ""
+ ctx.Data["footerPresent"] = false
+ ctx.Data["footerContent"] = ""
+}
+
+// WikiPost dispatches wiki POST requests to the create, delete or edit handlers
+func WikiPost(ctx *context.Context) {
+ switch ctx.FormString("action") {
+ case "_new":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ NewWikiPost(ctx)
+ return
+ case "_delete":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ DeleteWikiPagePost(ctx)
+ return
+ }
+
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ EditWikiPost(ctx)
+}
+
+// Wiki renders a single wiki page
+func Wiki(ctx *context.Context) {
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ switch ctx.FormString("action") {
+ case "_pages":
+ WikiPages(ctx)
+ return
+ case "_revision":
+ WikiRevision(ctx)
+ return
+ case "_edit":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ EditWiki(ctx)
+ return
+ case "_new":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ NewWiki(ctx)
+ return
+ }
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiRepo, entry := renderViewPage(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+ if entry == nil {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiPath := entry.Name()
+ if markup.Type(wikiPath) != markdown.MarkupName {
+ ext := strings.ToUpper(filepath.Ext(wikiPath))
+ ctx.Data["FormatWarning"] = fmt.Sprintf("%s rendering is not supported at the moment. Rendered as Markdown.", ext)
+ }
+ // Get last change information.
+ lastCommit, err := wikiRepo.GetCommitByPath(wikiPath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+ ctx.Data["Author"] = lastCommit.Author
+
+ ctx.HTML(http.StatusOK, tplWikiView)
+}
+
+// WikiRevision renders the revision list of a wiki page
+func WikiRevision(ctx *context.Context) {
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiRepo, entry := renderRevisionPage(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ if ctx.Written() {
+ return
+ }
+ if entry == nil {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ // Get last change information.
+ wikiPath := entry.Name()
+ lastCommit, err := wikiRepo.GetCommitByPath(wikiPath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+ ctx.Data["Author"] = lastCommit.Author
+
+ ctx.HTML(http.StatusOK, tplWikiRevision)
+}
+
+// WikiPages renders the wiki page list
+func WikiPages(ctx *context.Context) {
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki")
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.pages")
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return
+ }
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ entries, err := commit.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return
+ }
+ pages := make([]PageMeta, 0, len(entries))
+ for _, entry := range entries {
+ if !entry.IsRegular() {
+ continue
+ }
+ c, err := wikiRepo.GetCommitByPath(entry.Name())
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+ wikiName, err := wiki_service.GitPathToWebPath(entry.Name())
+ if err != nil {
+ if repo_model.IsErrWikiInvalidFileName(err) {
+ continue
+ }
+ ctx.ServerError("WikiFilenameToName", err)
+ return
+ }
+ _, displayName := wiki_service.WebPathToUserTitle(wikiName)
+ pages = append(pages, PageMeta{
+ Name: displayName,
+ SubURL: wiki_service.WebPathToURLPath(wikiName),
+ GitEntryName: entry.Name(),
+ UpdatedUnix: timeutil.TimeStamp(c.Author.When.Unix()),
+ })
+ }
+ ctx.Data["Pages"] = pages
+
+ ctx.HTML(http.StatusOK, tplWikiPages)
+}
+
+// WikiRaw outputs the raw blob requested by the user (an image, for example)
+func WikiRaw(ctx *context.Context) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("findEntryForFile", nil)
+ return
+ }
+		ctx.ServerError("findEntryForFile", err)
+ return
+ }
+
+ providedWebPath := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ providedGitPath := wiki_service.WebPathToGitPath(providedWebPath)
+ var entry *git.TreeEntry
+ if commit != nil {
+ // Try to find a file with that name
+ entry, err = findEntryForFile(commit, providedGitPath)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findFile", err)
+ return
+ }
+
+ if entry == nil {
+ // Try to find a wiki page with that name
+ providedGitPath = strings.TrimSuffix(providedGitPath, ".md")
+ entry, err = findEntryForFile(commit, providedGitPath)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findFile", err)
+ return
+ }
+ }
+ }
+
+ if entry != nil {
+ if err = common.ServeBlob(ctx.Base, ctx.Repo.TreePath, entry.Blob(), nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+ return
+ }
+
+ ctx.NotFound("findEntryForFile", nil)
+}
+
+// NewWiki renders the wiki creation page
+func NewWiki(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["title"] = "Home"
+ }
+ if ctx.FormString("title") != "" {
+ ctx.Data["title"] = ctx.FormString("title")
+ }
+
+ ctx.HTML(http.StatusOK, tplWikiNew)
+}
+
+// NewWikiPost handles the wiki creation request
+func NewWikiPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewWikiForm)
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplWikiNew)
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.RenderWithErr(ctx.Tr("repo.issues.new.title_empty"), tplWikiNew, form)
+ return
+ }
+
+ wikiName := wiki_service.UserTitleToWebPath("", form.Title)
+
+ if len(form.Message) == 0 {
+ form.Message = ctx.Locale.TrString("repo.editor.add", form.Title)
+ }
+
+ if err := wiki_service.AddWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName, form.Content, form.Message); err != nil {
+ if repo_model.IsErrWikiReservedName(err) {
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.wiki.reserved_page", wikiName), tplWikiNew, &form)
+ } else if repo_model.IsErrWikiAlreadyExist(err) {
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.wiki.page_already_exists"), tplWikiNew, &form)
+ } else {
+ ctx.ServerError("AddWikiPage", err)
+ }
+ return
+ }
+
+ notify_service.NewWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(wikiName), form.Message)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + wiki_service.WebPathToURLPath(wikiName))
+}
+
+// EditWiki renders the wiki edit page
+func EditWiki(ctx *context.Context) {
+ ctx.Data["PageIsWikiEdit"] = true
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki")
+ return
+ }
+
+ renderEditPage(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplWikiNew)
+}
+
+// EditWikiPost handles the wiki edit request
+func EditWikiPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewWikiForm)
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplWikiNew)
+ return
+ }
+
+ oldWikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ newWikiName := wiki_service.UserTitleToWebPath("", form.Title)
+
+ if len(form.Message) == 0 {
+ form.Message = ctx.Locale.TrString("repo.editor.update", form.Title)
+ }
+
+ if err := wiki_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, oldWikiName, newWikiName, form.Content, form.Message); err != nil {
+ ctx.ServerError("EditWikiPage", err)
+ return
+ }
+
+ notify_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(newWikiName), form.Message)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + wiki_service.WebPathToURLPath(newWikiName))
+}
+
+// DeleteWikiPagePost deletes a wiki page
+func DeleteWikiPagePost(ctx *context.Context) {
+ wikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(wikiName) == 0 {
+ wikiName = "Home"
+ }
+
+ if err := wiki_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName); err != nil {
+ ctx.ServerError("DeleteWikiPage", err)
+ return
+ }
+
+ notify_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(wikiName))
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/wiki/")
+}
+
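+// WikiSearchContent renders wiki search results for the keyword given in the "q" form parameter.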
+func WikiSearchContent(ctx *context.Context) {
+ keyword := ctx.FormTrim("q")
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplWikiSearch)
+ return
+ }
+
+ res, err := wiki_service.SearchWikiContents(ctx, ctx.Repo.Repository, keyword)
+ if err != nil {
+ ctx.ServerError("SearchWikiContents", err)
+ return
+ }
+
+ ctx.Data["Results"] = res
+ ctx.HTML(http.StatusOK, tplWikiSearch)
+}
diff --git a/routers/web/repo/wiki_test.go b/routers/web/repo/wiki_test.go
new file mode 100644
index 0000000..00a35a5
--- /dev/null
+++ b/routers/web/repo/wiki_test.go
@@ -0,0 +1,224 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "io"
+ "net/http"
+ "net/url"
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+const (
+ content = "Wiki contents for unit tests"
+ message = "Wiki commit message for unit tests"
+)
+
+func wikiEntry(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) *git.TreeEntry {
+ wikiRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer wikiRepo.Close()
+ commit, err := wikiRepo.GetBranchCommit("master")
+ require.NoError(t, err)
+ entries, err := commit.ListEntries()
+ require.NoError(t, err)
+ for _, entry := range entries {
+ if entry.Name() == wiki_service.WebPathToGitPath(wikiName) {
+ return entry
+ }
+ }
+ return nil
+}
+
+func wikiContent(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) string {
+ entry := wikiEntry(t, repo, wikiName)
+ if !assert.NotNil(t, entry) {
+ return ""
+ }
+ reader, err := entry.Blob().DataAsync()
+ require.NoError(t, err)
+ defer reader.Close()
+ bytes, err := io.ReadAll(reader)
+ require.NoError(t, err)
+ return string(bytes)
+}
+
+func assertWikiExists(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) {
+ assert.NotNil(t, wikiEntry(t, repo, wikiName))
+}
+
+func assertWikiNotExists(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) {
+ assert.Nil(t, wikiEntry(t, repo, wikiName))
+}
+
+func assertPagesMetas(t *testing.T, expectedNames []string, metas any) {
+ pageMetas, ok := metas.([]PageMeta)
+ if !assert.True(t, ok) {
+ return
+ }
+ if !assert.Len(t, pageMetas, len(expectedNames)) {
+ return
+ }
+ for i, pageMeta := range pageMetas {
+ assert.EqualValues(t, expectedNames[i], pageMeta.Name)
+ }
+}
+
+func TestWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadRepo(t, ctx, 1)
+ Wiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, "Home", ctx.Data["Title"])
+ assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
+}
+
+func TestWikiPages(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_pages")
+ contexttest.LoadRepo(t, ctx, 1)
+ WikiPages(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
+}
+
+func TestNewWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ NewWiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, ctx.Tr("repo.wiki.new_page"), ctx.Data["Title"])
+}
+
+func TestNewWikiPost(t *testing.T) {
+ for _, title := range []string{
+ "New page",
+ "&&&&",
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: title,
+ Content: content,
+ Message: message,
+ })
+ NewWikiPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
+ assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
+ }
+}
+
+func TestNewWikiPost_ReservedName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: "_edit",
+ Content: content,
+ Message: message,
+ })
+ NewWikiPost(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, ctx.Tr("repo.wiki.reserved_page"), ctx.Flash.ErrorMsg)
+ assertWikiNotExists(t, ctx.Repo.Repository, "_edit")
+}
+
+func TestEditWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_edit")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ EditWiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, "Home", ctx.Data["Title"])
+ assert.Equal(t, wikiContent(t, ctx.Repo.Repository, "Home"), ctx.Data["content"])
+}
+
+func TestEditWikiPost(t *testing.T) {
+ for _, title := range []string{
+ "Home",
+ "New/<page>",
+ } {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_new")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: title,
+ Content: content,
+ Message: message,
+ })
+ EditWikiPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
+ assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
+ if title != "Home" {
+ assertWikiNotExists(t, ctx.Repo.Repository, "Home")
+ }
+ }
+}
+
+func TestDeleteWikiPagePost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_delete")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ DeleteWikiPagePost(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assertWikiNotExists(t, ctx.Repo.Repository, "Home")
+}
+
+func TestWikiRaw(t *testing.T) {
+ for filepath, filetype := range map[string]string{
+ "jpeg.jpg": "image/jpeg",
+ "images/jpeg.jpg": "image/jpeg",
+ "Page With Spaced Name": "text/plain; charset=utf-8",
+ "Page-With-Spaced-Name": "text/plain; charset=utf-8",
+ "Page With Spaced Name.md": "", // there is no "Page With Spaced Name.md" in repo
+ "Page-With-Spaced-Name.md": "text/plain; charset=utf-8",
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/raw/"+url.PathEscape(filepath))
+ ctx.SetParams("*", filepath)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ WikiRaw(ctx)
+ if filetype == "" {
+ assert.EqualValues(t, http.StatusNotFound, ctx.Resp.Status(), "filepath: %s", filepath)
+ } else {
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status(), "filepath: %s", filepath)
+ assert.EqualValues(t, filetype, ctx.Resp.Header().Get("Content-Type"), "filepath: %s", filepath)
+ }
+ }
+}