path: root/routers/web
author    Daniel Baumann <daniel@debian.org>  2024-10-18 20:33:49 +0200
committer Daniel Baumann <daniel@debian.org>  2024-10-18 20:33:49 +0200
commit    dd136858f1ea40ad3c94191d647487fa4f31926c (patch)
tree      58fec94a7b2a12510c9664b21793f1ed560c6518 /routers/web
parent    Initial commit. (diff)
Adding upstream version 9.0.0. (upstream/9.0.0, upstream, debian)
Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat (limited to 'routers/web')
-rw-r--r--  routers/web/admin/admin.go  254
-rw-r--r--  routers/web/admin/admin_test.go  117
-rw-r--r--  routers/web/admin/applications.go  90
-rw-r--r--  routers/web/admin/auths.go  465
-rw-r--r--  routers/web/admin/config.go  255
-rw-r--r--  routers/web/admin/diagnosis.go  68
-rw-r--r--  routers/web/admin/emails.go  182
-rw-r--r--  routers/web/admin/hooks.go  73
-rw-r--r--  routers/web/admin/main_test.go  14
-rw-r--r--  routers/web/admin/notice.go  78
-rw-r--r--  routers/web/admin/orgs.go  39
-rw-r--r--  routers/web/admin/packages.go  113
-rw-r--r--  routers/web/admin/queue.go  89
-rw-r--r--  routers/web/admin/queue_tester.go  77
-rw-r--r--  routers/web/admin/repos.go  163
-rw-r--r--  routers/web/admin/runners.go  13
-rw-r--r--  routers/web/admin/stacktrace.go  46
-rw-r--r--  routers/web/admin/users.go  557
-rw-r--r--  routers/web/admin/users_test.go  200
-rw-r--r--  routers/web/auth/2fa.go  163
-rw-r--r--  routers/web/auth/auth.go  881
-rw-r--r--  routers/web/auth/auth_test.go  43
-rw-r--r--  routers/web/auth/linkaccount.go  308
-rw-r--r--  routers/web/auth/main_test.go  14
-rw-r--r--  routers/web/auth/oauth.go  1422
-rw-r--r--  routers/web/auth/oauth_test.go  103
-rw-r--r--  routers/web/auth/openid.go  391
-rw-r--r--  routers/web/auth/password.go  317
-rw-r--r--  routers/web/auth/webauthn.go  177
-rw-r--r--  routers/web/base.go  98
-rw-r--r--  routers/web/devtest/devtest.go  66
-rw-r--r--  routers/web/events/events.go  122
-rw-r--r--  routers/web/explore/code.go  144
-rw-r--r--  routers/web/explore/org.go  48
-rw-r--r--  routers/web/explore/repo.go  193
-rw-r--r--  routers/web/explore/topic.go  41
-rw-r--r--  routers/web/explore/user.go  163
-rw-r--r--  routers/web/feed/branch.go  50
-rw-r--r--  routers/web/feed/convert.go  332
-rw-r--r--  routers/web/feed/file.go  62
-rw-r--r--  routers/web/feed/profile.go  87
-rw-r--r--  routers/web/feed/release.go  52
-rw-r--r--  routers/web/feed/render.go  19
-rw-r--r--  routers/web/feed/repo.go  44
-rw-r--r--  routers/web/githttp.go  42
-rw-r--r--  routers/web/goget.go  93
-rw-r--r--  routers/web/healthcheck/check.go  140
-rw-r--r--  routers/web/home.go  117
-rw-r--r--  routers/web/metrics.go  33
-rw-r--r--  routers/web/misc/markup.go  18
-rw-r--r--  routers/web/misc/misc.go  49
-rw-r--r--  routers/web/misc/swagger-forgejo.go  19
-rw-r--r--  routers/web/misc/swagger.go  20
-rw-r--r--  routers/web/nodeinfo.go  32
-rw-r--r--  routers/web/org/home.go  189
-rw-r--r--  routers/web/org/main_test.go  14
-rw-r--r--  routers/web/org/members.go  144
-rw-r--r--  routers/web/org/org.go  80
-rw-r--r--  routers/web/org/org_labels.go  116
-rw-r--r--  routers/web/org/projects.go  610
-rw-r--r--  routers/web/org/projects_test.go  28
-rw-r--r--  routers/web/org/setting.go  258
-rw-r--r--  routers/web/org/setting/blocked_users.go  85
-rw-r--r--  routers/web/org/setting/runners.go  12
-rw-r--r--  routers/web/org/setting_oauth2.go  102
-rw-r--r--  routers/web/org/setting_packages.go  131
-rw-r--r--  routers/web/org/teams.go  628
-rw-r--r--  routers/web/repo/actions/actions.go  247
-rw-r--r--  routers/web/repo/actions/manual.go  62
-rw-r--r--  routers/web/repo/actions/view.go  781
-rw-r--r--  routers/web/repo/activity.go  105
-rw-r--r--  routers/web/repo/attachment.go  163
-rw-r--r--  routers/web/repo/badges/badges.go  164
-rw-r--r--  routers/web/repo/blame.go  298
-rw-r--r--  routers/web/repo/branch.go  262
-rw-r--r--  routers/web/repo/cherry_pick.go  192
-rw-r--r--  routers/web/repo/code_frequency.go  41
-rw-r--r--  routers/web/repo/commit.go  468
-rw-r--r--  routers/web/repo/compare.go  972
-rw-r--r--  routers/web/repo/contributors.go  38
-rw-r--r--  routers/web/repo/download.go  170
-rw-r--r--  routers/web/repo/editor.go  962
-rw-r--r--  routers/web/repo/editor_test.go  73
-rw-r--r--  routers/web/repo/find.go  24
-rw-r--r--  routers/web/repo/flags/manage.go  49
-rw-r--r--  routers/web/repo/githttp.go  599
-rw-r--r--  routers/web/repo/githttp_test.go  42
-rw-r--r--  routers/web/repo/helper.go  44
-rw-r--r--  routers/web/repo/helper_test.go  26
-rw-r--r--  routers/web/repo/issue.go  3822
-rw-r--r--  routers/web/repo/issue_content_history.go  237
-rw-r--r--  routers/web/repo/issue_dependency.go  144
-rw-r--r--  routers/web/repo/issue_label.go  229
-rw-r--r--  routers/web/repo/issue_label_test.go  173
-rw-r--r--  routers/web/repo/issue_lock.go  65
-rw-r--r--  routers/web/repo/issue_pin.go  107
-rw-r--r--  routers/web/repo/issue_stopwatch.go  113
-rw-r--r--  routers/web/repo/issue_test.go  375
-rw-r--r--  routers/web/repo/issue_timetrack.go  87
-rw-r--r--  routers/web/repo/issue_watch.go  63
-rw-r--r--  routers/web/repo/main_test.go  14
-rw-r--r--  routers/web/repo/middlewares.go  120
-rw-r--r--  routers/web/repo/migrate.go  310
-rw-r--r--  routers/web/repo/milestone.go  304
-rw-r--r--  routers/web/repo/packages.go  78
-rw-r--r--  routers/web/repo/patch.go  124
-rw-r--r--  routers/web/repo/projects.go  670
-rw-r--r--  routers/web/repo/projects_test.go  27
-rw-r--r--  routers/web/repo/pull.go  1838
-rw-r--r--  routers/web/repo/pull_review.go  316
-rw-r--r--  routers/web/repo/pull_review_test.go  104
-rw-r--r--  routers/web/repo/recent_commits.go  41
-rw-r--r--  routers/web/repo/release.go  857
-rw-r--r--  routers/web/repo/release_test.go  124
-rw-r--r--  routers/web/repo/render.go  76
-rw-r--r--  routers/web/repo/repo.go  774
-rw-r--r--  routers/web/repo/search.go  105
-rw-r--r--  routers/web/repo/setting/avatar.go  76
-rw-r--r--  routers/web/repo/setting/collaboration.go  217
-rw-r--r--  routers/web/repo/setting/default_branch.go  54
-rw-r--r--  routers/web/repo/setting/deploy_key.go  109
-rw-r--r--  routers/web/repo/setting/git_hooks.go  65
-rw-r--r--  routers/web/repo/setting/lfs.go  562
-rw-r--r--  routers/web/repo/setting/main_test.go  14
-rw-r--r--  routers/web/repo/setting/protected_branch.go  347
-rw-r--r--  routers/web/repo/setting/protected_tag.go  188
-rw-r--r--  routers/web/repo/setting/runners.go  187
-rw-r--r--  routers/web/repo/setting/secrets.go  127
-rw-r--r--  routers/web/repo/setting/setting.go  1115
-rw-r--r--  routers/web/repo/setting/settings_test.go  412
-rw-r--r--  routers/web/repo/setting/variables.go  140
-rw-r--r--  routers/web/repo/setting/webhook.go  485
-rw-r--r--  routers/web/repo/topic.go  60
-rw-r--r--  routers/web/repo/treelist.go  54
-rw-r--r--  routers/web/repo/view.go  1258
-rw-r--r--  routers/web/repo/view_test.go  62
-rw-r--r--  routers/web/repo/wiki.go  816
-rw-r--r--  routers/web/repo/wiki_test.go  224
-rw-r--r--  routers/web/shared/actions/runners.go  161
-rw-r--r--  routers/web/shared/actions/variables.go  65
-rw-r--r--  routers/web/shared/packages/packages.go  260
-rw-r--r--  routers/web/shared/project/column.go  48
-rw-r--r--  routers/web/shared/secrets/secrets.go  53
-rw-r--r--  routers/web/shared/user/header.go  163
-rw-r--r--  routers/web/swagger_json.go  13
-rw-r--r--  routers/web/user/avatar.go  57
-rw-r--r--  routers/web/user/code.go  129
-rw-r--r--  routers/web/user/home.go  883
-rw-r--r--  routers/web/user/home_test.go  169
-rw-r--r--  routers/web/user/main_test.go  14
-rw-r--r--  routers/web/user/notification.go  485
-rw-r--r--  routers/web/user/package.go  513
-rw-r--r--  routers/web/user/profile.go  385
-rw-r--r--  routers/web/user/search.go  44
-rw-r--r--  routers/web/user/setting/account.go  344
-rw-r--r--  routers/web/user/setting/account_test.go  101
-rw-r--r--  routers/web/user/setting/adopt.go  64
-rw-r--r--  routers/web/user/setting/applications.go  115
-rw-r--r--  routers/web/user/setting/blocked_users.go  46
-rw-r--r--  routers/web/user/setting/keys.go  338
-rw-r--r--  routers/web/user/setting/main_test.go  14
-rw-r--r--  routers/web/user/setting/oauth2.go  68
-rw-r--r--  routers/web/user/setting/oauth2_common.go  163
-rw-r--r--  routers/web/user/setting/packages.go  119
-rw-r--r--  routers/web/user/setting/profile.go  433
-rw-r--r--  routers/web/user/setting/runner.go  13
-rw-r--r--  routers/web/user/setting/security/2fa.go  260
-rw-r--r--  routers/web/user/setting/security/openid.go  126
-rw-r--r--  routers/web/user/setting/security/security.go  148
-rw-r--r--  routers/web/user/setting/security/webauthn.go  137
-rw-r--r--  routers/web/user/setting/webhooks.go  49
-rw-r--r--  routers/web/user/stop_watch.go  40
-rw-r--r--  routers/web/user/task.go  53
-rw-r--r--  routers/web/web.go  1658
-rw-r--r--  routers/web/webfinger.go  167
175 files changed, 42742 insertions, 0 deletions
diff --git a/routers/web/admin/admin.go b/routers/web/admin/admin.go
new file mode 100644
index 0000000..067203b
--- /dev/null
+++ b/routers/web/admin/admin.go
@@ -0,0 +1,254 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "fmt"
+ "net/http"
+ "reflect"
+ "runtime"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/updatechecker"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/cron"
+ "code.gitea.io/gitea/services/forms"
+ release_service "code.gitea.io/gitea/services/release"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplDashboard base.TplName = "admin/dashboard"
+ tplSystemStatus base.TplName = "admin/system_status"
+ tplSelfCheck base.TplName = "admin/self_check"
+ tplCron base.TplName = "admin/cron"
+ tplQueue base.TplName = "admin/queue"
+ tplStacktrace base.TplName = "admin/stacktrace"
+ tplQueueManage base.TplName = "admin/queue_manage"
+ tplStats base.TplName = "admin/stats"
+)
+
+var sysStatus struct {
+ StartTime string
+ NumGoroutine int
+
+ // General statistics.
+ MemAllocated string // bytes allocated and still in use
+ MemTotal string // bytes allocated (even if freed)
+ MemSys string // bytes obtained from system (sum of XxxSys below)
+ Lookups uint64 // number of pointer lookups
+ MemMallocs uint64 // number of mallocs
+ MemFrees uint64 // number of frees
+
+ // Main allocation heap statistics.
+ HeapAlloc string // bytes allocated and still in use
+ HeapSys string // bytes obtained from system
+ HeapIdle string // bytes in idle spans
+ HeapInuse string // bytes in non-idle span
+ HeapReleased string // bytes released to the OS
+ HeapObjects uint64 // total number of allocated objects
+
+ // Low-level fixed-size structure allocator statistics.
+ // Inuse is bytes used now.
+ // Sys is bytes obtained from system.
+ StackInuse string // bootstrap stacks
+ StackSys string
+ MSpanInuse string // mspan structures
+ MSpanSys string
+ MCacheInuse string // mcache structures
+ MCacheSys string
+ BuckHashSys string // profiling bucket hash table
+ GCSys string // GC metadata
+ OtherSys string // other system allocations
+
+ // Garbage collector statistics.
+ NextGC string // next run in HeapAlloc time (bytes)
+ LastGCTime string // last run time
+ PauseTotalNs string
+ PauseNs string // circular buffer of recent GC pause times, most recent at [(NumGC+255)%256]
+ NumGC uint32
+}
+
+func updateSystemStatus() {
+ sysStatus.StartTime = setting.AppStartTime.Format(time.RFC3339)
+
+ m := new(runtime.MemStats)
+ runtime.ReadMemStats(m)
+ sysStatus.NumGoroutine = runtime.NumGoroutine()
+
+ sysStatus.MemAllocated = base.FileSize(int64(m.Alloc))
+ sysStatus.MemTotal = base.FileSize(int64(m.TotalAlloc))
+ sysStatus.MemSys = base.FileSize(int64(m.Sys))
+ sysStatus.Lookups = m.Lookups
+ sysStatus.MemMallocs = m.Mallocs
+ sysStatus.MemFrees = m.Frees
+
+ sysStatus.HeapAlloc = base.FileSize(int64(m.HeapAlloc))
+ sysStatus.HeapSys = base.FileSize(int64(m.HeapSys))
+ sysStatus.HeapIdle = base.FileSize(int64(m.HeapIdle))
+ sysStatus.HeapInuse = base.FileSize(int64(m.HeapInuse))
+ sysStatus.HeapReleased = base.FileSize(int64(m.HeapReleased))
+ sysStatus.HeapObjects = m.HeapObjects
+
+ sysStatus.StackInuse = base.FileSize(int64(m.StackInuse))
+ sysStatus.StackSys = base.FileSize(int64(m.StackSys))
+ sysStatus.MSpanInuse = base.FileSize(int64(m.MSpanInuse))
+ sysStatus.MSpanSys = base.FileSize(int64(m.MSpanSys))
+ sysStatus.MCacheInuse = base.FileSize(int64(m.MCacheInuse))
+ sysStatus.MCacheSys = base.FileSize(int64(m.MCacheSys))
+ sysStatus.BuckHashSys = base.FileSize(int64(m.BuckHashSys))
+ sysStatus.GCSys = base.FileSize(int64(m.GCSys))
+ sysStatus.OtherSys = base.FileSize(int64(m.OtherSys))
+
+ sysStatus.NextGC = base.FileSize(int64(m.NextGC))
+ sysStatus.LastGCTime = time.Unix(0, int64(m.LastGC)).Format(time.RFC3339)
+ sysStatus.PauseTotalNs = fmt.Sprintf("%.1fs", float64(m.PauseTotalNs)/1000/1000/1000)
+ sysStatus.PauseNs = fmt.Sprintf("%.3fs", float64(m.PauseNs[(m.NumGC+255)%256])/1000/1000/1000)
+ sysStatus.NumGC = m.NumGC
+}
+
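+// prepareDeprecatedWarningsAlert surfaces the first deprecated-setting warning as a
+// flash error and notes how many more warnings exist.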
+func prepareDeprecatedWarningsAlert(ctx *context.Context) {
+ if len(setting.DeprecatedWarnings) > 0 {
+ content := setting.DeprecatedWarnings[0]
+ if len(setting.DeprecatedWarnings) > 1 {
+ content += fmt.Sprintf(" (and %d more)", len(setting.DeprecatedWarnings)-1)
+ }
+ ctx.Flash.Error(content, true)
+ }
+}
+
+// Dashboard renders the admin panel dashboard
+func Dashboard(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.dashboard")
+ ctx.Data["PageIsAdminDashboard"] = true
+ ctx.Data["NeedUpdate"] = updatechecker.GetNeedUpdate(ctx)
+ ctx.Data["RemoteVersion"] = updatechecker.GetRemoteVersion(ctx)
+ updateSystemStatus()
+ ctx.Data["SysStatus"] = sysStatus
+ ctx.Data["SSH"] = setting.SSH
+ prepareDeprecatedWarningsAlert(ctx)
+ ctx.HTML(http.StatusOK, tplDashboard)
+}
+
+func SystemStatus(ctx *context.Context) {
+ updateSystemStatus()
+ ctx.Data["SysStatus"] = sysStatus
+ ctx.HTML(http.StatusOK, tplSystemStatus)
+}
+
+// DashboardPost runs an admin operation
+func DashboardPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AdminDashboardForm)
+ ctx.Data["Title"] = ctx.Tr("admin.dashboard")
+ ctx.Data["PageIsAdminDashboard"] = true
+ updateSystemStatus()
+ ctx.Data["SysStatus"] = sysStatus
+
+ // Run operation.
+ if form.Op != "" {
+ switch form.Op {
+ case "sync_repo_branches":
+ go func() {
+ if err := repo_service.AddAllRepoBranchesToSyncQueue(graceful.GetManager().ShutdownContext()); err != nil {
+ log.Error("AddAllRepoBranchesToSyncQueue: %v: %v", ctx.Doer.ID, err)
+ }
+ }()
+ ctx.Flash.Success(ctx.Tr("admin.dashboard.sync_branch.started"))
+ case "sync_repo_tags":
+ go func() {
+ if err := release_service.AddAllRepoTagsToSyncQueue(graceful.GetManager().ShutdownContext()); err != nil {
+ log.Error("AddAllRepoTagsToSyncQueue: %v: %v", ctx.Doer.ID, err)
+ }
+ }()
+ ctx.Flash.Success(ctx.Tr("admin.dashboard.sync_tag.started"))
+ default:
+ task := cron.GetTask(form.Op)
+ if task != nil {
+ go task.RunWithUser(ctx.Doer, nil)
+ ctx.Flash.Success(ctx.Tr("admin.dashboard.task.started", ctx.Tr("admin.dashboard."+form.Op)))
+ } else {
+ ctx.Flash.Error(ctx.Tr("admin.dashboard.task.unknown", form.Op))
+ }
+ }
+ }
+ if form.From == "monitor" {
+ ctx.Redirect(setting.AppSubURL + "/admin/monitor/cron")
+ } else {
+ ctx.Redirect(setting.AppSubURL + "/admin")
+ }
+}
+
+func SelfCheck(ctx *context.Context) {
+ ctx.Data["PageIsAdminSelfCheck"] = true
+ r, err := db.CheckCollationsDefaultEngine()
+ if err != nil {
+ ctx.Flash.Error(fmt.Sprintf("CheckCollationsDefaultEngine: %v", err), true)
+ }
+
+ if r != nil {
+ ctx.Data["DatabaseType"] = setting.Database.Type
+ ctx.Data["DatabaseCheckResult"] = r
+ hasProblem := false
+ if !r.CollationEquals(r.DatabaseCollation, r.ExpectedCollation) {
+ ctx.Data["DatabaseCheckCollationMismatch"] = true
+ hasProblem = true
+ }
+ if !r.IsCollationCaseSensitive(r.DatabaseCollation) {
+ ctx.Data["DatabaseCheckCollationCaseInsensitive"] = true
+ hasProblem = true
+ }
+ ctx.Data["DatabaseCheckInconsistentCollationColumns"] = r.InconsistentCollationColumns
+ hasProblem = hasProblem || len(r.InconsistentCollationColumns) > 0
+
+ ctx.Data["DatabaseCheckHasProblems"] = hasProblem
+ }
+
+ elapsed, err := cache.Test()
+ if err != nil {
+ ctx.Data["CacheError"] = err
+ } else if elapsed > cache.SlowCacheThreshold {
+ ctx.Data["CacheSlow"] = fmt.Sprint(elapsed)
+ }
+
+ ctx.HTML(http.StatusOK, tplSelfCheck)
+}
+
+func CronTasks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.monitor.cron")
+ ctx.Data["PageIsAdminMonitorCron"] = true
+ ctx.Data["Entries"] = cron.ListTasks()
+ ctx.HTML(http.StatusOK, tplCron)
+}
+
+func MonitorStats(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.monitor.stats")
+ ctx.Data["PageIsAdminMonitorStats"] = true
+ modelStats := activities_model.GetStatistic(ctx).Counter
+ stats := map[string]any{}
+
+ // Use reflection to convert the stats struct to a map, rather than doing it
+ // by hand or going through JSON (the JSON encoder may render numbers in
+ // scientific notation).
+ rv := reflect.ValueOf(modelStats)
+ for i := 0; i < rv.NumField(); i++ {
+ field := rv.Field(i)
+ // Preserve old behavior, do not show arrays that are empty.
+ if field.Kind() == reflect.Slice && field.Len() == 0 {
+ continue
+ }
+ stats[rv.Type().Field(i).Name] = field.Interface()
+ }
+
+ ctx.Data["Stats"] = stats
+ ctx.HTML(http.StatusOK, tplStats)
+}
diff --git a/routers/web/admin/admin_test.go b/routers/web/admin/admin_test.go
new file mode 100644
index 0000000..3518869
--- /dev/null
+++ b/routers/web/admin/admin_test.go
@@ -0,0 +1,117 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "testing"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestShadowPassword(t *testing.T) {
+ kases := []struct {
+ Provider string
+ CfgItem string
+ Result string
+ }{
+ {
+ Provider: "redis",
+ CfgItem: "network=tcp,addr=:6379,password=gitea,db=0,pool_size=100,idle_timeout=180",
+ Result: "network=tcp,addr=:6379,password=******,db=0,pool_size=100,idle_timeout=180",
+ },
+ {
+ Provider: "mysql",
+ CfgItem: "root:@tcp(localhost:3306)/gitea?charset=utf8",
+ Result: "root:******@tcp(localhost:3306)/gitea?charset=utf8",
+ },
+ {
+ Provider: "mysql",
+ CfgItem: "/gitea?charset=utf8",
+ Result: "/gitea?charset=utf8",
+ },
+ {
+ Provider: "mysql",
+ CfgItem: "user:mypassword@/dbname",
+ Result: "user:******@/dbname",
+ },
+ {
+ Provider: "postgres",
+ CfgItem: "user=pqgotest dbname=pqgotest sslmode=verify-full",
+ Result: "user=pqgotest dbname=pqgotest sslmode=verify-full",
+ },
+ {
+ Provider: "postgres",
+ CfgItem: "user=pqgotest password= dbname=pqgotest sslmode=verify-full",
+ Result: "user=pqgotest password=****** dbname=pqgotest sslmode=verify-full",
+ },
+ {
+ Provider: "postgres",
+ CfgItem: "postgres://user:pass@hostname/dbname",
+ Result: "postgres://user:******@hostname/dbname",
+ },
+ {
+ Provider: "couchbase",
+ CfgItem: "http://dev-couchbase.example.com:8091/",
+ Result: "http://dev-couchbase.example.com:8091/",
+ },
+ {
+ Provider: "couchbase",
+ CfgItem: "http://user:the_password@dev-couchbase.example.com:8091/",
+ Result: "http://user:******@dev-couchbase.example.com:8091/",
+ },
+ }
+
+ for _, k := range kases {
+ assert.EqualValues(t, k.Result, shadowPassword(k.Provider, k.CfgItem))
+ }
+}
+
+func TestMonitorStats(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ t.Run("Normal", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Metrics.EnabledIssueByLabel, false)()
+ defer test.MockVariableValue(&setting.Metrics.EnabledIssueByRepository, false)()
+
+ ctx, _ := contexttest.MockContext(t, "admin/stats")
+ MonitorStats(ctx)
+
+ // Test some of the stats manually.
+ mappedStats := ctx.Data["Stats"].(map[string]any)
+ stats := activities_model.GetStatistic(ctx).Counter
+
+ assert.EqualValues(t, stats.Comment, mappedStats["Comment"])
+ assert.EqualValues(t, stats.Issue, mappedStats["Issue"])
+ assert.EqualValues(t, stats.User, mappedStats["User"])
+ assert.EqualValues(t, stats.Milestone, mappedStats["Milestone"])
+
+ // Ensure that these aren't set.
+ assert.Empty(t, stats.IssueByLabel)
+ assert.Empty(t, stats.IssueByRepository)
+ assert.Nil(t, mappedStats["IssueByLabel"])
+ assert.Nil(t, mappedStats["IssueByRepository"])
+ })
+
+ t.Run("IssueByX", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Metrics.EnabledIssueByLabel, true)()
+ defer test.MockVariableValue(&setting.Metrics.EnabledIssueByRepository, true)()
+
+ ctx, _ := contexttest.MockContext(t, "admin/stats")
+ MonitorStats(ctx)
+
+ mappedStats := ctx.Data["Stats"].(map[string]any)
+ stats := activities_model.GetStatistic(ctx).Counter
+
+ assert.NotEmpty(t, stats.IssueByLabel)
+ assert.NotEmpty(t, stats.IssueByRepository)
+ assert.EqualValues(t, stats.IssueByLabel, mappedStats["IssueByLabel"])
+ assert.EqualValues(t, stats.IssueByRepository, mappedStats["IssueByRepository"])
+ })
+}
diff --git a/routers/web/admin/applications.go b/routers/web/admin/applications.go
new file mode 100644
index 0000000..8583398
--- /dev/null
+++ b/routers/web/admin/applications.go
@@ -0,0 +1,90 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ user_setting "code.gitea.io/gitea/routers/web/user/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+var (
+ tplSettingsApplications base.TplName = "admin/applications/list"
+ tplSettingsOauth2ApplicationEdit base.TplName = "admin/applications/oauth2_edit"
+)
+
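+// newOAuth2CommonHandlers reuses the user-settings OAuth2 handlers with OwnerID 0,
+// i.e. for instance-wide applications managed under /admin/applications.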
+func newOAuth2CommonHandlers() *user_setting.OAuth2CommonHandlers {
+ return &user_setting.OAuth2CommonHandlers{
+ OwnerID: 0,
+ BasePathList: fmt.Sprintf("%s/admin/applications", setting.AppSubURL),
+ BasePathEditPrefix: fmt.Sprintf("%s/admin/applications/oauth2", setting.AppSubURL),
+ TplAppEdit: tplSettingsOauth2ApplicationEdit,
+ }
+}
+
+// Applications renders the admin applications page (at the moment, only OAuth2 applications)
+func Applications(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsAdminApplications"] = true
+
+ apps, err := db.Find[auth.OAuth2Application](ctx, auth.FindOAuth2ApplicationsOptions{
+ IsGlobal: true,
+ })
+ if err != nil {
+ ctx.ServerError("GetOAuth2ApplicationsByUserID", err)
+ return
+ }
+ ctx.Data["Applications"] = apps
+ ctx.Data["BuiltinApplications"] = auth.BuiltinApplications()
+ ctx.HTML(http.StatusOK, tplSettingsApplications)
+}
+
+// ApplicationsPost handles the request to add an OAuth2 application
+func ApplicationsPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsAdminApplications"] = true
+
+ oa := newOAuth2CommonHandlers()
+ oa.AddApp(ctx)
+}
+
+// EditApplication displays the given application
+func EditApplication(ctx *context.Context) {
+ ctx.Data["PageIsAdminApplications"] = true
+
+ oa := newOAuth2CommonHandlers()
+ oa.EditShow(ctx)
+}
+
+// EditApplicationPost handles the request to edit an OAuth2 application
+func EditApplicationPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsAdminApplications"] = true
+
+ oa := newOAuth2CommonHandlers()
+ oa.EditSave(ctx)
+}
+
+// ApplicationsRegenerateSecret handles the post request for regenerating the secret
+func ApplicationsRegenerateSecret(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsAdminApplications"] = true
+
+ oa := newOAuth2CommonHandlers()
+ oa.RegenerateSecret(ctx)
+}
+
+// DeleteApplication deletes the given oauth2 application
+func DeleteApplication(ctx *context.Context) {
+ oa := newOAuth2CommonHandlers()
+ oa.DeleteApp(ctx)
+}
+
+// TODO: revokes the grant with the given id
diff --git a/routers/web/admin/auths.go b/routers/web/admin/auths.go
new file mode 100644
index 0000000..799b7e8
--- /dev/null
+++ b/routers/web/admin/auths.go
@@ -0,0 +1,465 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/auth/pam"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ auth_service "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/ldap"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ pam_service "code.gitea.io/gitea/services/auth/source/pam"
+ "code.gitea.io/gitea/services/auth/source/smtp"
+ "code.gitea.io/gitea/services/auth/source/sspi"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+
+ "xorm.io/xorm/convert"
+)
+
+const (
+ tplAuths base.TplName = "admin/auth/list"
+ tplAuthNew base.TplName = "admin/auth/new"
+ tplAuthEdit base.TplName = "admin/auth/edit"
+)
+
+var (
+ separatorAntiPattern = regexp.MustCompile(`[^\w-\.]`)
+ langCodePattern = regexp.MustCompile(`^[a-z]{2}-[A-Z]{2}$`)
+)
+
+// Authentications shows the authentication config page
+func Authentications(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.authentication")
+ ctx.Data["PageIsAdminAuthentications"] = true
+
+ var err error
+ ctx.Data["Sources"], ctx.Data["Total"], err = db.FindAndCount[auth.Source](ctx, auth.FindSourcesOptions{})
+ if err != nil {
+ ctx.ServerError("auth.Sources", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplAuths)
+}
+
+type dropdownItem struct {
+ Name string
+ Type any
+}
+
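+// authSources and securityProtocols feed the dropdowns on the new/edit auth source
+// pages; PAM is only offered when the binary was built with PAM support.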
+var (
+ authSources = func() []dropdownItem {
+ items := []dropdownItem{
+ {auth.LDAP.String(), auth.LDAP},
+ {auth.DLDAP.String(), auth.DLDAP},
+ {auth.SMTP.String(), auth.SMTP},
+ {auth.OAuth2.String(), auth.OAuth2},
+ {auth.SSPI.String(), auth.SSPI},
+ }
+ if pam.Supported {
+ items = append(items, dropdownItem{auth.Names[auth.PAM], auth.PAM})
+ }
+ return items
+ }()
+
+ securityProtocols = []dropdownItem{
+ {ldap.SecurityProtocolNames[ldap.SecurityProtocolUnencrypted], ldap.SecurityProtocolUnencrypted},
+ {ldap.SecurityProtocolNames[ldap.SecurityProtocolLDAPS], ldap.SecurityProtocolLDAPS},
+ {ldap.SecurityProtocolNames[ldap.SecurityProtocolStartTLS], ldap.SecurityProtocolStartTLS},
+ }
+)
+
+// NewAuthSource renders the page for adding a new auth source
+func NewAuthSource(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.auths.new")
+ ctx.Data["PageIsAdminAuthentications"] = true
+
+ ctx.Data["type"] = auth.LDAP.Int()
+ ctx.Data["CurrentTypeName"] = auth.Names[auth.LDAP]
+ ctx.Data["CurrentSecurityProtocol"] = ldap.SecurityProtocolNames[ldap.SecurityProtocolUnencrypted]
+ ctx.Data["smtp_auth"] = "PLAIN"
+ ctx.Data["is_active"] = true
+ ctx.Data["is_sync_enabled"] = true
+ ctx.Data["AuthSources"] = authSources
+ ctx.Data["SecurityProtocols"] = securityProtocols
+ ctx.Data["SMTPAuths"] = smtp.Authenticators
+ oauth2providers := oauth2.GetSupportedOAuth2Providers()
+ ctx.Data["OAuth2Providers"] = oauth2providers
+
+ ctx.Data["SSPIAutoCreateUsers"] = true
+ ctx.Data["SSPIAutoActivateUsers"] = true
+ ctx.Data["SSPIStripDomainNames"] = true
+ ctx.Data["SSPISeparatorReplacement"] = "_"
+ ctx.Data["SSPIDefaultLanguage"] = ""
+
+ // only the first as default
+ ctx.Data["oauth2_provider"] = oauth2providers[0].Name()
+
+ ctx.HTML(http.StatusOK, tplAuthNew)
+}
+
+func parseLDAPConfig(form forms.AuthenticationForm) *ldap.Source {
+ var pageSize uint32
+ if form.UsePagedSearch {
+ pageSize = uint32(form.SearchPageSize)
+ }
+ return &ldap.Source{
+ Name: form.Name,
+ Host: form.Host,
+ Port: form.Port,
+ SecurityProtocol: ldap.SecurityProtocol(form.SecurityProtocol),
+ SkipVerify: form.SkipVerify,
+ BindDN: form.BindDN,
+ UserDN: form.UserDN,
+ BindPassword: form.BindPassword,
+ UserBase: form.UserBase,
+ DefaultDomainName: form.DefaultDomainName,
+ AttributeUsername: form.AttributeUsername,
+ AttributeName: form.AttributeName,
+ AttributeSurname: form.AttributeSurname,
+ AttributeMail: form.AttributeMail,
+ AttributesInBind: form.AttributesInBind,
+ AttributeSSHPublicKey: form.AttributeSSHPublicKey,
+ AttributeAvatar: form.AttributeAvatar,
+ SearchPageSize: pageSize,
+ Filter: form.Filter,
+ GroupsEnabled: form.GroupsEnabled,
+ GroupDN: form.GroupDN,
+ GroupFilter: form.GroupFilter,
+ GroupMemberUID: form.GroupMemberUID,
+ GroupTeamMap: form.GroupTeamMap,
+ GroupTeamMapRemoval: form.GroupTeamMapRemoval,
+ UserUID: form.UserUID,
+ AdminFilter: form.AdminFilter,
+ RestrictedFilter: form.RestrictedFilter,
+ AllowDeactivateAll: form.AllowDeactivateAll,
+ Enabled: true,
+ SkipLocalTwoFA: form.SkipLocalTwoFA,
+ }
+}
+
+func parseSMTPConfig(form forms.AuthenticationForm) *smtp.Source {
+ return &smtp.Source{
+ Auth: form.SMTPAuth,
+ Host: form.SMTPHost,
+ Port: form.SMTPPort,
+ AllowedDomains: form.AllowedDomains,
+ ForceSMTPS: form.ForceSMTPS,
+ SkipVerify: form.SkipVerify,
+ HeloHostname: form.HeloHostname,
+ DisableHelo: form.DisableHelo,
+ SkipLocalTwoFA: form.SkipLocalTwoFA,
+ }
+}
+
+func parseOAuth2Config(form forms.AuthenticationForm) *oauth2.Source {
+ var customURLMapping *oauth2.CustomURLMapping
+ if form.Oauth2UseCustomURL {
+ customURLMapping = &oauth2.CustomURLMapping{
+ TokenURL: form.Oauth2TokenURL,
+ AuthURL: form.Oauth2AuthURL,
+ ProfileURL: form.Oauth2ProfileURL,
+ EmailURL: form.Oauth2EmailURL,
+ Tenant: form.Oauth2Tenant,
+ }
+ } else {
+ customURLMapping = nil
+ }
+ var scopes []string
+ for _, s := range strings.Split(form.Oauth2Scopes, ",") {
+ s = strings.TrimSpace(s)
+ if s != "" {
+ scopes = append(scopes, s)
+ }
+ }
+
+ return &oauth2.Source{
+ Provider: form.Oauth2Provider,
+ ClientID: form.Oauth2Key,
+ ClientSecret: form.Oauth2Secret,
+ OpenIDConnectAutoDiscoveryURL: form.OpenIDConnectAutoDiscoveryURL,
+ CustomURLMapping: customURLMapping,
+ IconURL: form.Oauth2IconURL,
+ Scopes: scopes,
+ RequiredClaimName: form.Oauth2RequiredClaimName,
+ RequiredClaimValue: form.Oauth2RequiredClaimValue,
+ SkipLocalTwoFA: form.SkipLocalTwoFA,
+ GroupClaimName: form.Oauth2GroupClaimName,
+ RestrictedGroup: form.Oauth2RestrictedGroup,
+ AdminGroup: form.Oauth2AdminGroup,
+ GroupTeamMap: form.Oauth2GroupTeamMap,
+ GroupTeamMapRemoval: form.Oauth2GroupTeamMapRemoval,
+ }
+}
+
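+// parseSSPIConfig validates the SSPI-specific form fields (separator replacement and
+// default language) before building the source config.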
+func parseSSPIConfig(ctx *context.Context, form forms.AuthenticationForm) (*sspi.Source, error) {
+ if util.IsEmptyString(form.SSPISeparatorReplacement) {
+ ctx.Data["Err_SSPISeparatorReplacement"] = true
+ return nil, errors.New(ctx.Locale.TrString("form.SSPISeparatorReplacement") + ctx.Locale.TrString("form.require_error"))
+ }
+ if separatorAntiPattern.MatchString(form.SSPISeparatorReplacement) {
+ ctx.Data["Err_SSPISeparatorReplacement"] = true
+ return nil, errors.New(ctx.Locale.TrString("form.SSPISeparatorReplacement") + ctx.Locale.TrString("form.alpha_dash_dot_error"))
+ }
+
+ if form.SSPIDefaultLanguage != "" && !langCodePattern.MatchString(form.SSPIDefaultLanguage) {
+ ctx.Data["Err_SSPIDefaultLanguage"] = true
+ return nil, errors.New(ctx.Locale.TrString("form.lang_select_error"))
+ }
+
+ return &sspi.Source{
+ AutoCreateUsers: form.SSPIAutoCreateUsers,
+ AutoActivateUsers: form.SSPIAutoActivateUsers,
+ StripDomainNames: form.SSPIStripDomainNames,
+ SeparatorReplacement: form.SSPISeparatorReplacement,
+ DefaultLanguage: form.SSPIDefaultLanguage,
+ }, nil
+}
+
+// NewAuthSourcePost handles the request to add an auth source
+func NewAuthSourcePost(ctx *context.Context) {
+ form := *web.GetForm(ctx).(*forms.AuthenticationForm)
+ ctx.Data["Title"] = ctx.Tr("admin.auths.new")
+ ctx.Data["PageIsAdminAuthentications"] = true
+
+ ctx.Data["CurrentTypeName"] = auth.Type(form.Type).String()
+ ctx.Data["CurrentSecurityProtocol"] = ldap.SecurityProtocolNames[ldap.SecurityProtocol(form.SecurityProtocol)]
+ ctx.Data["AuthSources"] = authSources
+ ctx.Data["SecurityProtocols"] = securityProtocols
+ ctx.Data["SMTPAuths"] = smtp.Authenticators
+ oauth2providers := oauth2.GetSupportedOAuth2Providers()
+ ctx.Data["OAuth2Providers"] = oauth2providers
+
+ ctx.Data["SSPIAutoCreateUsers"] = true
+ ctx.Data["SSPIAutoActivateUsers"] = true
+ ctx.Data["SSPIStripDomainNames"] = true
+ ctx.Data["SSPISeparatorReplacement"] = "_"
+ ctx.Data["SSPIDefaultLanguage"] = ""
+
+ hasTLS := false
+ var config convert.Conversion
+ switch auth.Type(form.Type) {
+ case auth.LDAP, auth.DLDAP:
+ config = parseLDAPConfig(form)
+ hasTLS = ldap.SecurityProtocol(form.SecurityProtocol) > ldap.SecurityProtocolUnencrypted
+ case auth.SMTP:
+ config = parseSMTPConfig(form)
+ hasTLS = true
+ case auth.PAM:
+ config = &pam_service.Source{
+ ServiceName: form.PAMServiceName,
+ EmailDomain: form.PAMEmailDomain,
+ SkipLocalTwoFA: form.SkipLocalTwoFA,
+ }
+ case auth.OAuth2:
+ config = parseOAuth2Config(form)
+ oauth2Config := config.(*oauth2.Source)
+ if oauth2Config.Provider == "openidConnect" {
+ discoveryURL, err := url.Parse(oauth2Config.OpenIDConnectAutoDiscoveryURL)
+ if err != nil || (discoveryURL.Scheme != "http" && discoveryURL.Scheme != "https") {
+ ctx.Data["Err_DiscoveryURL"] = true
+ ctx.RenderWithErr(ctx.Tr("admin.auths.invalid_openIdConnectAutoDiscoveryURL"), tplAuthNew, form)
+ return
+ }
+ }
+ case auth.SSPI:
+ var err error
+ config, err = parseSSPIConfig(ctx, form)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplAuthNew, form)
+ return
+ }
+ existing, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{LoginType: auth.SSPI})
+ if err != nil || len(existing) > 0 {
+ ctx.Data["Err_Type"] = true
+ ctx.RenderWithErr(ctx.Tr("admin.auths.login_source_of_type_exist"), tplAuthNew, form)
+ return
+ }
+ default:
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+ ctx.Data["HasTLS"] = hasTLS
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplAuthNew)
+ return
+ }
+
+ if err := auth.CreateSource(ctx, &auth.Source{
+ Type: auth.Type(form.Type),
+ Name: form.Name,
+ IsActive: form.IsActive,
+ IsSyncEnabled: form.IsSyncEnabled,
+ Cfg: config,
+ }); err != nil {
+ if auth.IsErrSourceAlreadyExist(err) {
+ ctx.Data["Err_Name"] = true
+ ctx.RenderWithErr(ctx.Tr("admin.auths.login_source_exist", err.(auth.ErrSourceAlreadyExist).Name), tplAuthNew, form)
+ } else if oauth2.IsErrOpenIDConnectInitialize(err) {
+ ctx.Data["Err_DiscoveryURL"] = true
+ unwrapped := err.(oauth2.ErrOpenIDConnectInitialize).Unwrap()
+ ctx.RenderWithErr(ctx.Tr("admin.auths.unable_to_initialize_openid", unwrapped), tplAuthNew, form)
+ } else {
+ ctx.ServerError("auth.CreateSource", err)
+ }
+ return
+ }
+
+ log.Trace("Authentication created by admin(%s): %s", ctx.Doer.Name, form.Name)
+
+ ctx.Flash.Success(ctx.Tr("admin.auths.new_success", form.Name))
+ ctx.Redirect(setting.AppSubURL + "/admin/auths")
+}
+
+// EditAuthSource renders the auth source edit page
+func EditAuthSource(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.auths.edit")
+ ctx.Data["PageIsAdminAuthentications"] = true
+
+ ctx.Data["SecurityProtocols"] = securityProtocols
+ ctx.Data["SMTPAuths"] = smtp.Authenticators
+ oauth2providers := oauth2.GetSupportedOAuth2Providers()
+ ctx.Data["OAuth2Providers"] = oauth2providers
+
+ source, err := auth.GetSourceByID(ctx, ctx.ParamsInt64(":authid"))
+ if err != nil {
+ ctx.ServerError("auth.GetSourceByID", err)
+ return
+ }
+ ctx.Data["Source"] = source
+ ctx.Data["HasTLS"] = source.HasTLS()
+
+ if source.IsOAuth2() {
+ type Named interface {
+ Name() string
+ }
+
+ for _, provider := range oauth2providers {
+ if provider.Name() == source.Cfg.(Named).Name() {
+ ctx.Data["CurrentOAuth2Provider"] = provider
+ break
+ }
+ }
+ }
+
+ ctx.HTML(http.StatusOK, tplAuthEdit)
+}
+
+// EditAuthSourcePost handles the request to edit an auth source
+func EditAuthSourcePost(ctx *context.Context) {
+ form := *web.GetForm(ctx).(*forms.AuthenticationForm)
+ ctx.Data["Title"] = ctx.Tr("admin.auths.edit")
+ ctx.Data["PageIsAdminAuthentications"] = true
+
+ ctx.Data["SMTPAuths"] = smtp.Authenticators
+ oauth2providers := oauth2.GetSupportedOAuth2Providers()
+ ctx.Data["OAuth2Providers"] = oauth2providers
+
+ source, err := auth.GetSourceByID(ctx, ctx.ParamsInt64(":authid"))
+ if err != nil {
+ ctx.ServerError("auth.GetSourceByID", err)
+ return
+ }
+ ctx.Data["Source"] = source
+ ctx.Data["HasTLS"] = source.HasTLS()
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplAuthEdit)
+ return
+ }
+
+ var config convert.Conversion
+ switch auth.Type(form.Type) {
+ case auth.LDAP, auth.DLDAP:
+ config = parseLDAPConfig(form)
+ case auth.SMTP:
+ config = parseSMTPConfig(form)
+ case auth.PAM:
+ config = &pam_service.Source{
+ ServiceName: form.PAMServiceName,
+ EmailDomain: form.PAMEmailDomain,
+ }
+ case auth.OAuth2:
+ config = parseOAuth2Config(form)
+ oauth2Config := config.(*oauth2.Source)
+ if oauth2Config.Provider == "openidConnect" {
+ discoveryURL, err := url.Parse(oauth2Config.OpenIDConnectAutoDiscoveryURL)
+ if err != nil || (discoveryURL.Scheme != "http" && discoveryURL.Scheme != "https") {
+ ctx.Data["Err_DiscoveryURL"] = true
+ ctx.RenderWithErr(ctx.Tr("admin.auths.invalid_openIdConnectAutoDiscoveryURL"), tplAuthEdit, form)
+ return
+ }
+ }
+ case auth.SSPI:
+ config, err = parseSSPIConfig(ctx, form)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplAuthEdit, form)
+ return
+ }
+ default:
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ source.Name = form.Name
+ source.IsActive = form.IsActive
+ source.IsSyncEnabled = form.IsSyncEnabled
+ source.Cfg = config
+ if err := auth.UpdateSource(ctx, source); err != nil {
+ if auth.IsErrSourceAlreadyExist(err) {
+ ctx.Data["Err_Name"] = true
+ ctx.RenderWithErr(ctx.Tr("admin.auths.login_source_exist", err.(auth.ErrSourceAlreadyExist).Name), tplAuthEdit, form)
+ } else if oauth2.IsErrOpenIDConnectInitialize(err) {
+ ctx.Flash.Error(err.Error(), true)
+ ctx.Data["Err_DiscoveryURL"] = true
+ ctx.HTML(http.StatusOK, tplAuthEdit)
+ } else {
+ ctx.ServerError("UpdateSource", err)
+ }
+ return
+ }
+ log.Trace("Authentication changed by admin(%s): %d", ctx.Doer.Name, source.ID)
+
+ ctx.Flash.Success(ctx.Tr("admin.auths.update_success"))
+ ctx.Redirect(setting.AppSubURL + "/admin/auths/" + strconv.FormatInt(form.ID, 10))
+}
+
+// DeleteAuthSource handles the request to delete an auth source
+func DeleteAuthSource(ctx *context.Context) {
+ source, err := auth.GetSourceByID(ctx, ctx.ParamsInt64(":authid"))
+ if err != nil {
+ ctx.ServerError("auth.GetSourceByID", err)
+ return
+ }
+
+ if err = auth_service.DeleteSource(ctx, source); err != nil {
+ if auth.IsErrSourceInUse(err) {
+ ctx.Flash.Error(ctx.Tr("admin.auths.still_in_used"))
+ } else {
+ ctx.Flash.Error(fmt.Sprintf("auth_service.DeleteSource: %v", err))
+ }
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/auths/" + url.PathEscape(ctx.Params(":authid")))
+ return
+ }
+ log.Trace("Authentication deleted by admin(%s): %d", ctx.Doer.Name, source.ID)
+
+ ctx.Flash.Success(ctx.Tr("admin.auths.deletion_success"))
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/auths")
+}
diff --git a/routers/web/admin/config.go b/routers/web/admin/config.go
new file mode 100644
index 0000000..06d0ea6
--- /dev/null
+++ b/routers/web/admin/config.go
@@ -0,0 +1,255 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/setting/config"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/mailer"
+
+ "code.forgejo.org/go-chi/session"
+)
+
+const (
+ tplConfig base.TplName = "admin/config"
+ tplConfigSettings base.TplName = "admin/config_settings"
+)
+
+// SendTestMail sends a test mail to confirm the mail service is working
+func SendTestMail(ctx *context.Context) {
+ email := ctx.FormString("email")
+ // Send a test email to the user's email address and redirect back to Config
+ if err := mailer.SendTestMail(email); err != nil {
+ ctx.Flash.Error(ctx.Tr("admin.config.test_mail_failed", email, err))
+ } else {
+ ctx.Flash.Info(ctx.Tr("admin.config.test_mail_sent", email))
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/admin/config")
+}
+
+// TestCache tests the cache settings
+func TestCache(ctx *context.Context) {
+ elapsed, err := cache.Test()
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("admin.config.cache_test_failed", err))
+ } else {
+ if elapsed > cache.SlowCacheThreshold {
+ ctx.Flash.Warning(ctx.Tr("admin.config.cache_test_slow", elapsed))
+ } else {
+ ctx.Flash.Info(ctx.Tr("admin.config.cache_test_succeeded", elapsed))
+ }
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/admin/config")
+}
+
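+// shadowPasswordKV masks the value of a "password=" field in a splitter-separated
+// config string, e.g. "addr=:6379,password=gitea,db=0" becomes "addr=:6379,password=******,db=0".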
+func shadowPasswordKV(cfgItem, splitter string) string {
+ fields := strings.Split(cfgItem, splitter)
+ for i := 0; i < len(fields); i++ {
+ if strings.HasPrefix(fields[i], "password=") {
+ fields[i] = "password=******"
+ break
+ }
+ }
+ return strings.Join(fields, splitter)
+}
+
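+// shadowURL masks the password in the userinfo part of a URL-style connection string,
+// e.g. "postgres://user:pass@host/db" becomes "postgres://user:******@host/db".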
+func shadowURL(provider, cfgItem string) string {
+ u, err := url.Parse(cfgItem)
+ if err != nil {
+ log.Error("Shadowing Password for %v failed: %v", provider, err)
+ return cfgItem
+ }
+ if u.User != nil {
+ atIdx := strings.Index(cfgItem, "@")
+ if atIdx > 0 {
+ colonIdx := strings.LastIndex(cfgItem[:atIdx], ":")
+ if colonIdx > 0 {
+ return cfgItem[:colonIdx+1] + "******" + cfgItem[atIdx:]
+ }
+ }
+ }
+ return cfgItem
+}
+
+func shadowPassword(provider, cfgItem string) string {
+ switch provider {
+ case "redis":
+ return shadowPasswordKV(cfgItem, ",")
+ case "mysql":
+ // root:@tcp(localhost:3306)/macaron?charset=utf8
+ atIdx := strings.Index(cfgItem, "@")
+ if atIdx > 0 {
+ colonIdx := strings.Index(cfgItem[:atIdx], ":")
+ if colonIdx > 0 {
+ return cfgItem[:colonIdx+1] + "******" + cfgItem[atIdx:]
+ }
+ }
+ return cfgItem
+ case "postgres":
+ // user=jiahuachen dbname=macaron port=5432 sslmode=disable
+ if !strings.HasPrefix(cfgItem, "postgres://") {
+ return shadowPasswordKV(cfgItem, " ")
+ }
+ fallthrough
+ case "couchbase":
+ return shadowURL(provider, cfgItem)
+ // postgres://pqgotest:password@localhost/pqgotest?sslmode=verify-full
+ // Notice: use shadowURL
+ }
+ return cfgItem
+}
+
+// Config shows the admin config page
+func Config(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.config_summary")
+ ctx.Data["PageIsAdminConfig"] = true
+ ctx.Data["PageIsAdminConfigSummary"] = true
+
+ ctx.Data["CustomConf"] = setting.CustomConf
+ ctx.Data["AppUrl"] = setting.AppURL
+ ctx.Data["AppBuiltWith"] = setting.AppBuiltWith
+ ctx.Data["Domain"] = setting.Domain
+ ctx.Data["OfflineMode"] = setting.OfflineMode
+ ctx.Data["RunUser"] = setting.RunUser
+ ctx.Data["RunMode"] = util.ToTitleCase(setting.RunMode)
+ ctx.Data["GitVersion"] = git.VersionInfo()
+
+ ctx.Data["AppDataPath"] = setting.AppDataPath
+ ctx.Data["RepoRootPath"] = setting.RepoRootPath
+ ctx.Data["CustomRootPath"] = setting.CustomPath
+ ctx.Data["LogRootPath"] = setting.Log.RootPath
+ ctx.Data["ScriptType"] = setting.ScriptType
+ ctx.Data["ReverseProxyAuthUser"] = setting.ReverseProxyAuthUser
+ ctx.Data["ReverseProxyAuthEmail"] = setting.ReverseProxyAuthEmail
+
+ ctx.Data["SSH"] = setting.SSH
+ ctx.Data["LFS"] = setting.LFS
+
+ ctx.Data["Service"] = setting.Service
+ ctx.Data["DbCfg"] = setting.Database
+ ctx.Data["Webhook"] = setting.Webhook
+
+ ctx.Data["MailerEnabled"] = false
+ if setting.MailService != nil {
+ ctx.Data["MailerEnabled"] = true
+ ctx.Data["Mailer"] = setting.MailService
+ }
+
+ ctx.Data["CacheAdapter"] = setting.CacheService.Adapter
+ ctx.Data["CacheInterval"] = setting.CacheService.Interval
+
+ ctx.Data["CacheConn"] = shadowPassword(setting.CacheService.Adapter, setting.CacheService.Conn)
+ ctx.Data["CacheItemTTL"] = setting.CacheService.TTL
+
+ sessionCfg := setting.SessionConfig
+ if sessionCfg.Provider == "VirtualSession" {
+ var realSession session.Options
+ if err := json.Unmarshal([]byte(sessionCfg.ProviderConfig), &realSession); err != nil {
+ log.Error("Unable to unmarshall session config for virtual provider config: %s\nError: %v", sessionCfg.ProviderConfig, err)
+ }
+ sessionCfg.Provider = realSession.Provider
+ sessionCfg.ProviderConfig = realSession.ProviderConfig
+ sessionCfg.CookieName = realSession.CookieName
+ sessionCfg.CookiePath = realSession.CookiePath
+ sessionCfg.Gclifetime = realSession.Gclifetime
+ sessionCfg.Maxlifetime = realSession.Maxlifetime
+ sessionCfg.Secure = realSession.Secure
+ sessionCfg.Domain = realSession.Domain
+ }
+ sessionCfg.ProviderConfig = shadowPassword(sessionCfg.Provider, sessionCfg.ProviderConfig)
+ ctx.Data["SessionConfig"] = sessionCfg
+
+ ctx.Data["Git"] = setting.Git
+ ctx.Data["AccessLogTemplate"] = setting.Log.AccessLogTemplate
+ ctx.Data["LogSQL"] = setting.Database.LogSQL
+
+ ctx.Data["Loggers"] = log.GetManager().DumpLoggers()
+ config.GetDynGetter().InvalidateCache()
+ prepareDeprecatedWarningsAlert(ctx)
+
+ ctx.HTML(http.StatusOK, tplConfig)
+}
+
+func ConfigSettings(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.config_settings")
+ ctx.Data["PageIsAdminConfig"] = true
+ ctx.Data["PageIsAdminConfigSettings"] = true
+ ctx.Data["DefaultOpenWithEditorAppsString"] = setting.DefaultOpenWithEditorApps().ToTextareaString()
+ ctx.HTML(http.StatusOK, tplConfigSettings)
+}
+
+func ChangeConfig(ctx *context.Context) {
+ key := strings.TrimSpace(ctx.FormString("key"))
+ value := ctx.FormString("value")
+ cfg := setting.Config()
+
+ marshalBool := func(v string) (string, error) { //nolint:unparam
+ if b, _ := strconv.ParseBool(v); b {
+ return "true", nil
+ }
+ return "false", nil
+ }
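+ // marshalOpenWithApps parses one "DisplayName = URL" pair per textarea line and
+ // stores the list as JSON; blank or malformed lines are skipped.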
+ marshalOpenWithApps := func(value string) (string, error) {
+ lines := strings.Split(value, "\n")
+ var openWithEditorApps setting.OpenWithEditorAppsType
+ for _, line := range lines {
+ line = strings.TrimSpace(line)
+ if line == "" {
+ continue
+ }
+ displayName, openURL, ok := strings.Cut(line, "=")
+ displayName, openURL = strings.TrimSpace(displayName), strings.TrimSpace(openURL)
+ if !ok || displayName == "" || openURL == "" {
+ continue
+ }
+ openWithEditorApps = append(openWithEditorApps, setting.OpenWithEditorApp{
+ DisplayName: strings.TrimSpace(displayName),
+ OpenURL: strings.TrimSpace(openURL),
+ })
+ }
+ b, err := json.Marshal(openWithEditorApps)
+ if err != nil {
+ return "", err
+ }
+ return string(b), nil
+ }
+ marshallers := map[string]func(string) (string, error){
+ cfg.Picture.DisableGravatar.DynKey(): marshalBool,
+ cfg.Picture.EnableFederatedAvatar.DynKey(): marshalBool,
+ cfg.Repository.OpenWithEditorApps.DynKey(): marshalOpenWithApps,
+ }
+ marshaller, hasMarshaller := marshallers[key]
+ if !hasMarshaller {
+ ctx.JSONError(ctx.Tr("admin.config.set_setting_failed", key))
+ return
+ }
+ marshaledValue, err := marshaller(value)
+ if err != nil {
+ ctx.JSONError(ctx.Tr("admin.config.set_setting_failed", key))
+ return
+ }
+ if err = system_model.SetSettings(ctx, map[string]string{key: marshaledValue}); err != nil {
+ ctx.JSONError(ctx.Tr("admin.config.set_setting_failed", key))
+ return
+ }
+
+ config.GetDynGetter().InvalidateCache()
+ ctx.JSONOK()
+}
diff --git a/routers/web/admin/diagnosis.go b/routers/web/admin/diagnosis.go
new file mode 100644
index 0000000..020554a
--- /dev/null
+++ b/routers/web/admin/diagnosis.go
@@ -0,0 +1,68 @@
+// Copyright 2023 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "archive/zip"
+ "fmt"
+ "runtime/pprof"
+ "time"
+
+ "code.gitea.io/gitea/modules/httplib"
+ "code.gitea.io/gitea/services/context"
+)
+
+func MonitorDiagnosis(ctx *context.Context) {
+ seconds := ctx.FormInt64("seconds")
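+ // Clamp the requested CPU-profiling window to between 5 and 300 seconds.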
+ if seconds <= 5 {
+ seconds = 5
+ }
+ if seconds > 300 {
+ seconds = 300
+ }
+
+ httplib.ServeSetHeaders(ctx.Resp, &httplib.ServeHeaderOptions{
+ ContentType: "application/zip",
+ Disposition: "attachment",
+ Filename: fmt.Sprintf("gitea-diagnosis-%s.zip", time.Now().Format("20060102-150405")),
+ })
+
+ zipWriter := zip.NewWriter(ctx.Resp)
+ defer zipWriter.Close()
+
+ f, err := zipWriter.CreateHeader(&zip.FileHeader{Name: "goroutine-before.txt", Method: zip.Deflate, Modified: time.Now()})
+ if err != nil {
+ ctx.ServerError("Failed to create zip file", err)
+ return
+ }
+ _ = pprof.Lookup("goroutine").WriteTo(f, 1)
+
+ f, err = zipWriter.CreateHeader(&zip.FileHeader{Name: "cpu-profile.dat", Method: zip.Deflate, Modified: time.Now()})
+ if err != nil {
+ ctx.ServerError("Failed to create zip file", err)
+ return
+ }
+
+ err = pprof.StartCPUProfile(f)
+ if err == nil {
+ time.Sleep(time.Duration(seconds) * time.Second)
+ pprof.StopCPUProfile()
+ } else {
+ _, _ = f.Write([]byte(err.Error()))
+ }
+
+ f, err = zipWriter.CreateHeader(&zip.FileHeader{Name: "goroutine-after.txt", Method: zip.Deflate, Modified: time.Now()})
+ if err != nil {
+ ctx.ServerError("Failed to create zip file", err)
+ return
+ }
+ _ = pprof.Lookup("goroutine").WriteTo(f, 1)
+
+ f, err = zipWriter.CreateHeader(&zip.FileHeader{Name: "heap.dat", Method: zip.Deflate, Modified: time.Now()})
+ if err != nil {
+ ctx.ServerError("Failed to create zip file", err)
+ return
+ }
+ _ = pprof.Lookup("heap").WriteTo(f, 0)
+}
diff --git a/routers/web/admin/emails.go b/routers/web/admin/emails.go
new file mode 100644
index 0000000..f0d8555
--- /dev/null
+++ b/routers/web/admin/emails.go
@@ -0,0 +1,182 @@
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "bytes"
+ "net/http"
+ "net/url"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/user"
+)
+
+const (
+ tplEmails base.TplName = "admin/emails/list"
+)
+
+// Emails shows all email addresses
+func Emails(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.emails")
+ ctx.Data["PageIsAdminEmails"] = true
+
+ opts := &user_model.SearchEmailOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.Admin.UserPagingNum,
+ Page: ctx.FormInt("page"),
+ },
+ }
+
+ if opts.Page <= 1 {
+ opts.Page = 1
+ }
+
+ type ActiveEmail struct {
+ user_model.SearchEmailResult
+ CanChange bool
+ }
+
+ var (
+ baseEmails []*user_model.SearchEmailResult
+ emails []ActiveEmail
+ count int64
+ err error
+ orderBy user_model.SearchEmailOrderBy
+ )
+
+ ctx.Data["SortType"] = ctx.FormString("sort")
+ switch ctx.FormString("sort") {
+ case "email":
+ orderBy = user_model.SearchEmailOrderByEmail
+ case "reverseemail":
+ orderBy = user_model.SearchEmailOrderByEmailReverse
+ case "username":
+ orderBy = user_model.SearchEmailOrderByName
+ case "reverseusername":
+ orderBy = user_model.SearchEmailOrderByNameReverse
+ default:
+ ctx.Data["SortType"] = "email"
+ orderBy = user_model.SearchEmailOrderByEmail
+ }
+
+ opts.Keyword = ctx.FormTrim("q")
+ opts.SortType = orderBy
+ if len(ctx.FormString("is_activated")) != 0 {
+ opts.IsActivated = optional.Some(ctx.FormBool("activated"))
+ }
+ if len(ctx.FormString("is_primary")) != 0 {
+ opts.IsPrimary = optional.Some(ctx.FormBool("primary"))
+ }
+
+ if len(opts.Keyword) == 0 || isKeywordValid(opts.Keyword) {
+ baseEmails, count, err = user_model.SearchEmails(ctx, opts)
+ if err != nil {
+ ctx.ServerError("SearchEmails", err)
+ return
+ }
+ emails = make([]ActiveEmail, len(baseEmails))
+ for i := range baseEmails {
+ emails[i].SearchEmailResult = *baseEmails[i]
+ // Don't let admins deactivate their own primary email address
+ // We already know the user is admin
+ emails[i].CanChange = ctx.Doer.ID != emails[i].UID || !emails[i].IsPrimary
+ }
+ }
+ ctx.Data["Keyword"] = opts.Keyword
+ ctx.Data["Total"] = count
+ ctx.Data["Emails"] = emails
+
+ pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplEmails)
+}
+
+var nullByte = []byte{0x00}
+
+func isKeywordValid(keyword string) bool {
+ return !bytes.Contains([]byte(keyword), nullByte)
+}
+
+// ActivateEmail serves a POST request for activating/deactivating a user's email
+func ActivateEmail(ctx *context.Context) {
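+ // The form submits "1" or "0" for the primary/activate flags; anything else is
+ // rejected as a bad request below.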
+ truefalse := map[string]bool{"1": true, "0": false}
+
+ uid := ctx.FormInt64("uid")
+ email := ctx.FormString("email")
+ primary, okp := truefalse[ctx.FormString("primary")]
+ activate, oka := truefalse[ctx.FormString("activate")]
+
+ if uid == 0 || len(email) == 0 || !okp || !oka {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ log.Info("Changing activation for User ID: %d, email: %s, primary: %v to %v", uid, email, primary, activate)
+
+ if err := user_model.ActivateUserEmail(ctx, uid, email, activate); err != nil {
+ log.Error("ActivateUserEmail(%v,%v,%v): %v", uid, email, activate, err)
+ if user_model.IsErrEmailAlreadyUsed(err) {
+ ctx.Flash.Error(ctx.Tr("admin.emails.duplicate_active"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("admin.emails.not_updated", err))
+ }
+ } else {
+ log.Info("Activation for User ID: %d, email: %s, primary: %v changed to %v", uid, email, primary, activate)
+ ctx.Flash.Info(ctx.Tr("admin.emails.updated"))
+ }
+
+ redirect, _ := url.Parse(setting.AppSubURL + "/admin/emails")
+ q := url.Values{}
+ if val := ctx.FormTrim("q"); len(val) > 0 {
+ q.Set("q", val)
+ }
+ if val := ctx.FormTrim("sort"); len(val) > 0 {
+ q.Set("sort", val)
+ }
+ if val := ctx.FormTrim("is_primary"); len(val) > 0 {
+ q.Set("is_primary", val)
+ }
+ if val := ctx.FormTrim("is_activated"); len(val) > 0 {
+ q.Set("is_activated", val)
+ }
+ redirect.RawQuery = q.Encode()
+ ctx.Redirect(redirect.String())
+}
+
+// DeleteEmail serves a POST request for deleting a user's email
+func DeleteEmail(ctx *context.Context) {
+ u, err := user_model.GetUserByID(ctx, ctx.FormInt64("Uid"))
+ if err != nil || u == nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ email, err := user_model.GetEmailAddressByID(ctx, u.ID, ctx.FormInt64("id"))
+ if err != nil || email == nil {
+ ctx.ServerError("GetEmailAddressByID", err)
+ return
+ }
+
+ if err := user.DeleteEmailAddresses(ctx, u, []string{email.Email}); err != nil {
+ if user_model.IsErrPrimaryEmailCannotDelete(err) {
+ ctx.Flash.Error(ctx.Tr("admin.emails.delete_primary_email_error"))
+ ctx.JSONRedirect("")
+ return
+ }
+ ctx.ServerError("DeleteEmailAddresses", err)
+ return
+ }
+ log.Trace("Email address deleted: %s %s", u.Name, email.Email)
+
+ ctx.Flash.Success(ctx.Tr("admin.emails.deletion_success"))
+ ctx.JSONRedirect("")
+}
diff --git a/routers/web/admin/hooks.go b/routers/web/admin/hooks.go
new file mode 100644
index 0000000..cdca0a5
--- /dev/null
+++ b/routers/web/admin/hooks.go
@@ -0,0 +1,73 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ webhook_service "code.gitea.io/gitea/services/webhook"
+)
+
+const (
+ // tplAdminHooks template path to render hook settings
+ tplAdminHooks base.TplName = "admin/hooks"
+)
+
+// DefaultOrSystemWebhooks renders both admin default and system webhook list pages
+func DefaultOrSystemWebhooks(ctx *context.Context) {
+ var err error
+
+ ctx.Data["Title"] = ctx.Tr("admin.hooks")
+ ctx.Data["PageIsAdminSystemHooks"] = true
+ ctx.Data["PageIsAdminDefaultHooks"] = true
+
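+ // Render both webhook lists from copies of the shared template data so each copy
+ // can override Title, Description and the other keys independently.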
+ def := make(map[string]any, len(ctx.Data))
+ sys := make(map[string]any, len(ctx.Data))
+ for k, v := range ctx.Data {
+ def[k] = v
+ sys[k] = v
+ }
+
+ sys["Title"] = ctx.Tr("admin.systemhooks")
+ sys["Description"] = ctx.Tr("admin.systemhooks.desc", "https://forgejo.org/docs/latest/user/webhooks/")
+ sys["Webhooks"], err = webhook.GetSystemWebhooks(ctx, false)
+ sys["BaseLink"] = setting.AppSubURL + "/admin/hooks"
+ sys["BaseLinkNew"] = setting.AppSubURL + "/admin/system-hooks"
+ sys["WebhookList"] = webhook_service.List()
+ if err != nil {
+ ctx.ServerError("GetWebhooksAdmin", err)
+ return
+ }
+
+ def["Title"] = ctx.Tr("admin.defaulthooks")
+ def["Description"] = ctx.Tr("admin.defaulthooks.desc", "https://forgejo.org/docs/latest/user/webhooks/")
+ def["Webhooks"], err = webhook.GetDefaultWebhooks(ctx)
+ def["BaseLink"] = setting.AppSubURL + "/admin/hooks"
+ def["BaseLinkNew"] = setting.AppSubURL + "/admin/default-hooks"
+ def["WebhookList"] = webhook_service.List()
+ if err != nil {
+ ctx.ServerError("GetWebhooksAdmin", err)
+ return
+ }
+
+ ctx.Data["DefaultWebhooks"] = def
+ ctx.Data["SystemWebhooks"] = sys
+
+ ctx.HTML(http.StatusOK, tplAdminHooks)
+}
+
+// DeleteDefaultOrSystemWebhook handler to delete an admin-defined system or default webhook
+func DeleteDefaultOrSystemWebhook(ctx *context.Context) {
+ if err := webhook.DeleteDefaultSystemWebhook(ctx, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteDefaultWebhook: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook_deletion_success"))
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/hooks")
+}
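
DefaultOrSystemWebhooks renders two webhook lists from a single handler by copying the shared template data into two independent maps and overriding only the list-specific keys. A dependency-free sketch of that copy-and-override pattern, with plain maps standing in for ctx.Data:

    package main

    import "fmt"

    func main() {
        shared := map[string]any{"Title": "hooks", "PageIsAdminDefaultHooks": true}

        // Copy the shared template data so each list can override keys independently.
        def := make(map[string]any, len(shared))
        sys := make(map[string]any, len(shared))
        for k, v := range shared {
            def[k] = v
            sys[k] = v
        }
        def["Title"] = "default hooks"
        sys["Title"] = "system hooks"

        fmt.Println(def["Title"], sys["Title"], shared["Title"]) // default hooks system hooks hooks
    }
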
diff --git a/routers/web/admin/main_test.go b/routers/web/admin/main_test.go
new file mode 100644
index 0000000..e1294dd
--- /dev/null
+++ b/routers/web/admin/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/admin/notice.go b/routers/web/admin/notice.go
new file mode 100644
index 0000000..36303cb
--- /dev/null
+++ b/routers/web/admin/notice.go
@@ -0,0 +1,78 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "strconv"
+
+ "code.gitea.io/gitea/models/db"
+ system_model "code.gitea.io/gitea/models/system"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplNotices base.TplName = "admin/notice"
+)
+
+// Notices shows notices for the admin
+func Notices(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.notices")
+ ctx.Data["PageIsAdminNotices"] = true
+
+ total := system_model.CountNotices(ctx)
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ notices, err := system_model.Notices(ctx, page, setting.UI.Admin.NoticePagingNum)
+ if err != nil {
+ ctx.ServerError("Notices", err)
+ return
+ }
+ ctx.Data["Notices"] = notices
+
+ ctx.Data["Total"] = total
+
+ ctx.Data["Page"] = context.NewPagination(int(total), setting.UI.Admin.NoticePagingNum, page, 5)
+
+ ctx.HTML(http.StatusOK, tplNotices)
+}
+
+// DeleteNotices deletes the selected notices
+func DeleteNotices(ctx *context.Context) {
+ strs := ctx.FormStrings("ids[]")
+ ids := make([]int64, 0, len(strs))
+ for i := range strs {
+ id, _ := strconv.ParseInt(strs[i], 10, 64)
+ if id > 0 {
+ ids = append(ids, id)
+ }
+ }
+
+ if err := db.DeleteByIDs[system_model.Notice](ctx, ids...); err != nil {
+ ctx.Flash.Error("DeleteNoticesByIDs: " + err.Error())
+ ctx.Status(http.StatusInternalServerError)
+ } else {
+ ctx.Flash.Success(ctx.Tr("admin.notices.delete_success"))
+ ctx.Status(http.StatusOK)
+ }
+}
+
+// EmptyNotices deletes all notices
+func EmptyNotices(ctx *context.Context) {
+ if err := system_model.DeleteNotices(ctx, 0, 0); err != nil {
+ ctx.ServerError("DeleteNotices", err)
+ return
+ }
+
+ log.Trace("System notices deleted by admin (%s): [start: %d]", ctx.Doer.Name, 0)
+ ctx.Flash.Success(ctx.Tr("admin.notices.delete_success"))
+ ctx.Redirect(setting.AppSubURL + "/admin/notices")
+}
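
DeleteNotices tolerates malformed form values by parsing each submitted ID and silently dropping anything that is not a positive integer. A standalone sketch of that filtering step, with a hypothetical input slice:

    package main

    import (
        "fmt"
        "strconv"
    )

    func main() {
        strs := []string{"3", "not-a-number", "0", "42"}
        ids := make([]int64, 0, len(strs))
        for _, s := range strs {
            // ParseInt returns 0 on failure, which the > 0 check filters out.
            if id, _ := strconv.ParseInt(s, 10, 64); id > 0 {
                ids = append(ids, id)
            }
        }
        fmt.Println(ids) // [3 42]
    }
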
diff --git a/routers/web/admin/orgs.go b/routers/web/admin/orgs.go
new file mode 100644
index 0000000..cea28f8
--- /dev/null
+++ b/routers/web/admin/orgs.go
@@ -0,0 +1,39 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/routers/web/explore"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplOrgs base.TplName = "admin/org/list"
+)
+
+// Organizations shows all the organizations
+func Organizations(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.organizations")
+ ctx.Data["PageIsAdminOrganizations"] = true
+
+ if ctx.FormString("sort") == "" {
+ ctx.SetFormString("sort", UserSearchDefaultAdminSort)
+ }
+
+ explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{
+ Actor: ctx.Doer,
+ Type: user_model.UserTypeOrganization,
+		IncludeReserved: true, // administrators need to list all accounts, including reserved ones
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.Admin.OrgPagingNum,
+ },
+ Visible: []structs.VisibleType{structs.VisibleTypePublic, structs.VisibleTypeLimited, structs.VisibleTypePrivate},
+ }, tplOrgs)
+}
diff --git a/routers/web/admin/packages.go b/routers/web/admin/packages.go
new file mode 100644
index 0000000..39f064a
--- /dev/null
+++ b/routers/web/admin/packages.go
@@ -0,0 +1,113 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "net/url"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ packages_model "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ packages_service "code.gitea.io/gitea/services/packages"
+ packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup"
+)
+
+const (
+ tplPackagesList base.TplName = "admin/packages/list"
+)
+
+// Packages shows all packages
+func Packages(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ query := ctx.FormTrim("q")
+ packageType := ctx.FormTrim("type")
+ sort := ctx.FormTrim("sort")
+
+ pvs, total, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ Type: packages_model.Type(packageType),
+ Name: packages_model.SearchValue{Value: query},
+ Sort: sort,
+ IsInternal: optional.Some(false),
+ Paginator: &db.ListOptions{
+ PageSize: setting.UI.PackagesPagingNum,
+ Page: page,
+ },
+ })
+ if err != nil {
+ ctx.ServerError("SearchVersions", err)
+ return
+ }
+
+ pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptors", err)
+ return
+ }
+
+ totalBlobSize, err := packages_model.GetTotalBlobSize(ctx)
+ if err != nil {
+ ctx.ServerError("GetTotalBlobSize", err)
+ return
+ }
+
+ totalUnreferencedBlobSize, err := packages_model.GetTotalUnreferencedBlobSize(ctx)
+ if err != nil {
+		ctx.ServerError("GetTotalUnreferencedBlobSize", err)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsAdminPackages"] = true
+ ctx.Data["Query"] = query
+ ctx.Data["PackageType"] = packageType
+ ctx.Data["AvailableTypes"] = packages_model.TypeList
+ ctx.Data["SortType"] = sort
+ ctx.Data["PackageDescriptors"] = pds
+ ctx.Data["TotalCount"] = total
+ ctx.Data["TotalBlobSize"] = totalBlobSize - totalUnreferencedBlobSize
+ ctx.Data["TotalUnreferencedBlobSize"] = totalUnreferencedBlobSize
+
+ pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5)
+ pager.AddParamString("q", query)
+ pager.AddParamString("type", packageType)
+ pager.AddParamString("sort", sort)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplPackagesList)
+}
+
+// DeletePackageVersion deletes a package version
+func DeletePackageVersion(ctx *context.Context) {
+ pv, err := packages_model.GetVersionByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+		ctx.ServerError("GetVersionByID", err)
+ return
+ }
+
+ if err := packages_service.RemovePackageVersion(ctx, ctx.Doer, pv); err != nil {
+ ctx.ServerError("RemovePackageVersion", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("packages.settings.delete.success"))
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/packages?page=" + url.QueryEscape(ctx.FormString("page")) + "&q=" + url.QueryEscape(ctx.FormString("q")) + "&type=" + url.QueryEscape(ctx.FormString("type")))
+}
+
+func CleanupExpiredData(ctx *context.Context) {
+ if err := packages_cleanup_service.CleanupExpiredData(ctx, time.Duration(0)); err != nil {
+ ctx.ServerError("CleanupExpiredData", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("admin.packages.cleanup.success"))
+ ctx.Redirect(setting.AppSubURL + "/admin/packages")
+}
diff --git a/routers/web/admin/queue.go b/routers/web/admin/queue.go
new file mode 100644
index 0000000..246ab37
--- /dev/null
+++ b/routers/web/admin/queue.go
@@ -0,0 +1,89 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "strconv"
+
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+func Queues(ctx *context.Context) {
+ if !setting.IsProd {
+ initTestQueueOnce()
+ }
+ ctx.Data["Title"] = ctx.Tr("admin.monitor.queues")
+ ctx.Data["PageIsAdminMonitorQueue"] = true
+ ctx.Data["Queues"] = queue.GetManager().ManagedQueues()
+ ctx.HTML(http.StatusOK, tplQueue)
+}
+
+// QueueManage shows details for a specific queue
+func QueueManage(ctx *context.Context) {
+ qid := ctx.ParamsInt64("qid")
+ mq := queue.GetManager().GetManagedQueue(qid)
+ if mq == nil {
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("admin.monitor.queue", mq.GetName())
+ ctx.Data["PageIsAdminMonitor"] = true
+ ctx.Data["Queue"] = mq
+ ctx.HTML(http.StatusOK, tplQueueManage)
+}
+
+// QueueSet sets the maximum number of workers and other settings for this queue
+func QueueSet(ctx *context.Context) {
+ qid := ctx.ParamsInt64("qid")
+ mq := queue.GetManager().GetManagedQueue(qid)
+ if mq == nil {
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+
+ maxNumberStr := ctx.FormString("max-number")
+
+ var err error
+ var maxNumber int
+ if len(maxNumberStr) > 0 {
+ maxNumber, err = strconv.Atoi(maxNumberStr)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("admin.monitor.queue.settings.maxnumberworkers.error"))
+ ctx.Redirect(setting.AppSubURL + "/admin/monitor/queue/" + strconv.FormatInt(qid, 10))
+ return
+ }
+ if maxNumber < -1 {
+ maxNumber = -1
+ }
+ } else {
+ maxNumber = mq.GetWorkerMaxNumber()
+ }
+
+ mq.SetWorkerMaxNumber(maxNumber)
+ ctx.Flash.Success(ctx.Tr("admin.monitor.queue.settings.changed"))
+ ctx.Redirect(setting.AppSubURL + "/admin/monitor/queue/" + strconv.FormatInt(qid, 10))
+}
+
+func QueueRemoveAllItems(ctx *context.Context) {
+	// The queue in Forgejo has no transaction support, so in rare cases it can
+	// become corrupted or out of sync.
+	// A site admin can remove all items from the queue to make it work again.
+ qid := ctx.ParamsInt64("qid")
+ mq := queue.GetManager().GetManagedQueue(qid)
+ if mq == nil {
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+
+ if err := mq.RemoveAllItems(ctx); err != nil {
+ ctx.ServerError("RemoveAllItems", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("admin.monitor.queue.settings.remove_all_items_done"))
+ ctx.Redirect(setting.AppSubURL + "/admin/monitor/queue/" + strconv.FormatInt(qid, 10))
+}
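
QueueSet treats an empty max-number as "keep the current value" and clamps anything below -1 up to -1, the manager's "no limit" sentinel. A small sketch of that parsing rule; parseMaxWorkers and the fallback argument are illustrative names, not Forgejo APIs:

    package main

    import (
        "fmt"
        "strconv"
    )

    // parseMaxWorkers mirrors the form handling: empty input keeps the current value,
    // invalid input reports an error, and values below -1 clamp to -1.
    func parseMaxWorkers(input string, current int) (int, error) {
        if input == "" {
            return current, nil
        }
        n, err := strconv.Atoi(input)
        if err != nil {
            return 0, err
        }
        if n < -1 {
            n = -1
        }
        return n, nil
    }

    func main() {
        for _, in := range []string{"", "8", "-5", "oops"} {
            n, err := parseMaxWorkers(in, 3)
            fmt.Printf("%q -> %d, err=%v\n", in, n, err)
        }
    }
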
diff --git a/routers/web/admin/queue_tester.go b/routers/web/admin/queue_tester.go
new file mode 100644
index 0000000..8f713b3
--- /dev/null
+++ b/routers/web/admin/queue_tester.go
@@ -0,0 +1,77 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "runtime/pprof"
+ "sync"
+ "time"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/queue"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+var testQueueOnce sync.Once
+
+// initTestQueueOnce initializes the test queue in dev mode.
+// The test queue is also shown in the queue list, so developers can see the queue
+// length, worker number and item count on the admin page and try removing items.
+func initTestQueueOnce() {
+ testQueueOnce.Do(func() {
+ ctx, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().ShutdownContext(), "TestQueue", process.SystemProcessType, false)
+ qs := setting.QueueSettings{
+ Name: "test-queue",
+ Type: "channel",
+ Length: 20,
+ BatchLength: 2,
+ MaxWorkers: 3,
+ }
+ testQueue, err := queue.NewWorkerPoolQueueWithContext(ctx, "test-queue", qs, func(t ...int64) (unhandled []int64) {
+ for range t {
+ select {
+ case <-graceful.GetManager().ShutdownContext().Done():
+ case <-time.After(5 * time.Second):
+ }
+ }
+ return nil
+ }, true)
+ if err != nil {
+ log.Error("unable to create test queue: %v", err)
+ return
+ }
+
+ queue.GetManager().AddManagedQueue(testQueue)
+ testQueue.SetWorkerMaxNumber(5)
+ go graceful.GetManager().RunWithCancel(testQueue)
+ go func() {
+ pprof.SetGoroutineLabels(ctx)
+ defer finished()
+
+ cnt := int64(0)
+ adding := true
+ for {
+ select {
+ case <-ctx.Done():
+ case <-time.After(500 * time.Millisecond):
+ if adding {
+ if testQueue.GetQueueItemNumber() == qs.Length {
+ adding = false
+ }
+ } else {
+ if testQueue.GetQueueItemNumber() == 0 {
+ adding = true
+ }
+ }
+ if adding {
+ _ = testQueue.Push(cnt)
+ cnt++
+ }
+ }
+ }
+ }()
+ })
+}
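
The feeder goroutine above alternates between filling the test queue to its configured Length and waiting for the workers to drain it, so the admin page shows the item count rising and falling. A toy, dependency-free sketch of that fill/drain toggle, with a slice standing in for both the queue and its workers:

    package main

    import "fmt"

    func main() {
        const capacity = 5
        queue := make([]int, 0, capacity)
        adding := true
        cnt := 0

        for tick := 0; tick < 20; tick++ {
            // Flip direction at the boundaries, like the dev test queue feeder.
            if adding && len(queue) == capacity {
                adding = false
            } else if !adding && len(queue) == 0 {
                adding = true
            }
            if adding {
                queue = append(queue, cnt)
                cnt++
            } else {
                queue = queue[1:] // a "worker" consumes one item
            }
            fmt.Println(tick, len(queue))
        }
    }
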
diff --git a/routers/web/admin/repos.go b/routers/web/admin/repos.go
new file mode 100644
index 0000000..d0339fd
--- /dev/null
+++ b/routers/web/admin/repos.go
@@ -0,0 +1,163 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/explore"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplRepos base.TplName = "admin/repo/list"
+ tplUnadoptedRepos base.TplName = "admin/repo/unadopted"
+)
+
+// Repos shows all the repositories
+func Repos(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.repositories")
+ ctx.Data["PageIsAdminRepositories"] = true
+
+ explore.RenderRepoSearch(ctx, &explore.RepoSearchOptions{
+ Private: true,
+ PageSize: setting.UI.Admin.RepoPagingNum,
+ TplName: tplRepos,
+ OnlyShowRelevant: false,
+ })
+}
+
+// DeleteRepo deletes one repository
+func DeleteRepo(ctx *context.Context) {
+ repo, err := repo_model.GetRepositoryByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return
+ }
+
+ if ctx.Repo != nil && ctx.Repo.GitRepo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == repo.ID {
+ ctx.Repo.GitRepo.Close()
+ }
+
+ if err := repo_service.DeleteRepository(ctx, ctx.Doer, repo, true); err != nil {
+ ctx.ServerError("DeleteRepository", err)
+ return
+ }
+ log.Trace("Repository deleted: %s", repo.FullName())
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success"))
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/repos?page=" + url.QueryEscape(ctx.FormString("page")) + "&sort=" + url.QueryEscape(ctx.FormString("sort")))
+}
+
+// UnadoptedRepos lists the unadopted repositories
+func UnadoptedRepos(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.repositories")
+ ctx.Data["PageIsAdminRepositories"] = true
+
+ opts := db.ListOptions{
+ PageSize: setting.UI.Admin.UserPagingNum,
+ Page: ctx.FormInt("page"),
+ }
+
+ if opts.Page <= 0 {
+ opts.Page = 1
+ }
+
+ ctx.Data["CurrentPage"] = opts.Page
+
+ doSearch := ctx.FormBool("search")
+
+ ctx.Data["search"] = doSearch
+ q := ctx.FormString("q")
+
+ if !doSearch {
+ pager := context.NewPagination(0, opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "search", "search")
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplUnadoptedRepos)
+ return
+ }
+
+ ctx.Data["Keyword"] = q
+ repoNames, count, err := repo_service.ListUnadoptedRepositories(ctx, q, &opts)
+ if err != nil {
+ ctx.ServerError("ListUnadoptedRepositories", err)
+ return
+ }
+ ctx.Data["Dirs"] = repoNames
+ pager := context.NewPagination(count, opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "search", "search")
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplUnadoptedRepos)
+}
+
+// AdoptOrDeleteRepository adopts or deletes a repository
+func AdoptOrDeleteRepository(ctx *context.Context) {
+ dir := ctx.FormString("id")
+ action := ctx.FormString("action")
+ page := ctx.FormString("page")
+ q := ctx.FormString("q")
+
+ dirSplit := strings.SplitN(dir, "/", 2)
+ if len(dirSplit) != 2 {
+ ctx.Redirect(setting.AppSubURL + "/admin/repos")
+ return
+ }
+
+ ctxUser, err := user_model.GetUserByName(ctx, dirSplit[0])
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ log.Debug("User does not exist: %s", dirSplit[0])
+ ctx.Redirect(setting.AppSubURL + "/admin/repos")
+ return
+ }
+ ctx.ServerError("GetUserByName", err)
+ return
+ }
+
+ repoName := dirSplit[1]
+
+	// make sure the target is not already a repository
+ has, err := repo_model.IsRepositoryModelExist(ctx, ctxUser, repoName)
+ if err != nil {
+ ctx.ServerError("IsRepositoryExist", err)
+ return
+ }
+ isDir, err := util.IsDir(repo_model.RepoPath(ctxUser.Name, repoName))
+ if err != nil {
+ ctx.ServerError("IsDir", err)
+ return
+ }
+ if has || !isDir {
+ // Fallthrough to failure mode
+ } else if action == "adopt" {
+ if _, err := repo_service.AdoptRepository(ctx, ctx.Doer, ctxUser, repo_service.CreateRepoOptions{
+ Name: dirSplit[1],
+ IsPrivate: true,
+ }); err != nil {
+ ctx.ServerError("repository.AdoptRepository", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.adopt_preexisting_success", dir))
+ } else if action == "delete" {
+ if err := repo_service.DeleteUnadoptedRepository(ctx, ctx.Doer, ctxUser, dirSplit[1]); err != nil {
+			ctx.ServerError("repository.DeleteUnadoptedRepository", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.delete_preexisting_success", dir))
+ }
+ ctx.Redirect(setting.AppSubURL + "/admin/repos/unadopted?search=true&q=" + url.QueryEscape(q) + "&page=" + url.QueryEscape(page))
+}
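
AdoptOrDeleteRepository receives the unadopted directory as a single "owner/repo" string and bails out unless it splits into exactly two parts. A standalone sketch of that split; it is slightly stricter than the handler, which only checks the part count:

    package main

    import (
        "fmt"
        "strings"
    )

    // splitOwnerRepo splits an "owner/repo" directory name; any further slashes stay in the repo part.
    func splitOwnerRepo(dir string) (string, string, bool) {
        parts := strings.SplitN(dir, "/", 2)
        if len(parts) != 2 || parts[0] == "" || parts[1] == "" {
            return "", "", false
        }
        return parts[0], parts[1], true
    }

    func main() {
        for _, dir := range []string{"alice/project", "orphaned", "alice/"} {
            owner, repo, ok := splitOwnerRepo(dir)
            fmt.Println(dir, "->", owner, repo, ok)
        }
    }
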
diff --git a/routers/web/admin/runners.go b/routers/web/admin/runners.go
new file mode 100644
index 0000000..d73290a
--- /dev/null
+++ b/routers/web/admin/runners.go
@@ -0,0 +1,13 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+func RedirectToDefaultSetting(ctx *context.Context) {
+ ctx.Redirect(setting.AppSubURL + "/admin/actions/runners")
+}
diff --git a/routers/web/admin/stacktrace.go b/routers/web/admin/stacktrace.go
new file mode 100644
index 0000000..d6def94
--- /dev/null
+++ b/routers/web/admin/stacktrace.go
@@ -0,0 +1,46 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "net/http"
+ "runtime"
+
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// Stacktrace shows the admin monitor goroutines page
+func Stacktrace(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.monitor")
+ ctx.Data["PageIsAdminMonitorStacktrace"] = true
+
+ ctx.Data["GoroutineCount"] = runtime.NumGoroutine()
+
+ show := ctx.FormString("show")
+ ctx.Data["ShowGoroutineList"] = show
+	// By default, do nothing that might cause server errors, to avoid unnecessary 500 pages.
+	// This page is the entry point for collecting a diagnosis report.
+ if show != "" {
+ showNoSystem := show == "process"
+ processStacks, processCount, _, err := process.GetManager().ProcessStacktraces(false, showNoSystem)
+ if err != nil {
+ ctx.ServerError("GoroutineStacktrace", err)
+ return
+ }
+
+ ctx.Data["ProcessStacks"] = processStacks
+ ctx.Data["ProcessCount"] = processCount
+ }
+
+ ctx.HTML(http.StatusOK, tplStacktrace)
+}
+
+// StacktraceCancel cancels a process
+func StacktraceCancel(ctx *context.Context) {
+ pid := ctx.Params("pid")
+ process.GetManager().Cancel(process.IDType(pid))
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/monitor/stacktrace")
+}
diff --git a/routers/web/admin/users.go b/routers/web/admin/users.go
new file mode 100644
index 0000000..25fef5f
--- /dev/null
+++ b/routers/web/admin/users.go
@@ -0,0 +1,557 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "errors"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/explore"
+ user_setting "code.gitea.io/gitea/routers/web/user/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+const (
+ tplUsers base.TplName = "admin/user/list"
+ tplUserNew base.TplName = "admin/user/new"
+ tplUserView base.TplName = "admin/user/view"
+ tplUserEdit base.TplName = "admin/user/edit"
+)
+
+// UserSearchDefaultAdminSort is the default sort type for admin view
+const UserSearchDefaultAdminSort = "alphabetically"
+
+// Users shows all the users
+func Users(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.users")
+ ctx.Data["PageIsAdminUsers"] = true
+
+ extraParamStrings := map[string]string{}
+ statusFilterKeys := []string{"is_active", "is_admin", "is_restricted", "is_2fa_enabled", "is_prohibit_login"}
+ statusFilterMap := map[string]string{}
+ for _, filterKey := range statusFilterKeys {
+ paramKey := "status_filter[" + filterKey + "]"
+ paramVal := ctx.FormString(paramKey)
+ statusFilterMap[filterKey] = paramVal
+ if paramVal != "" {
+ extraParamStrings[paramKey] = paramVal
+ }
+ }
+
+ sortType := ctx.FormString("sort")
+ if sortType == "" {
+ sortType = UserSearchDefaultAdminSort
+ ctx.SetFormString("sort", sortType)
+ }
+ ctx.PageData["adminUserListSearchForm"] = map[string]any{
+ "StatusFilterMap": statusFilterMap,
+ "SortType": sortType,
+ }
+
+ explore.RenderUserSearch(ctx, &user_model.SearchUserOptions{
+ Actor: ctx.Doer,
+ Type: user_model.UserTypeIndividual,
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.Admin.UserPagingNum,
+ },
+ SearchByEmail: true,
+ IsActive: util.OptionalBoolParse(statusFilterMap["is_active"]),
+ IsAdmin: util.OptionalBoolParse(statusFilterMap["is_admin"]),
+ IsRestricted: util.OptionalBoolParse(statusFilterMap["is_restricted"]),
+ IsTwoFactorEnabled: util.OptionalBoolParse(statusFilterMap["is_2fa_enabled"]),
+ IsProhibitLogin: util.OptionalBoolParse(statusFilterMap["is_prohibit_login"]),
+		IncludeReserved: true, // administrators need to list all accounts, including reserved, bot and remote ones
+ ExtraParamStrings: extraParamStrings,
+ }, tplUsers)
+}
+
+// NewUser renders the page for adding a new user
+func NewUser(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.users.new_account")
+ ctx.Data["PageIsAdminUsers"] = true
+ ctx.Data["DefaultUserVisibilityMode"] = setting.Service.DefaultUserVisibilityMode
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+
+ ctx.Data["login_type"] = "0-0"
+
+ sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: optional.Some(true),
+ })
+ if err != nil {
+ ctx.ServerError("auth.Sources", err)
+ return
+ }
+ ctx.Data["Sources"] = sources
+
+ ctx.Data["CanSendEmail"] = setting.MailService != nil
+ ctx.HTML(http.StatusOK, tplUserNew)
+}
+
+// NewUserPost handles the form submission for adding a new user
+func NewUserPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AdminCreateUserForm)
+ ctx.Data["Title"] = ctx.Tr("admin.users.new_account")
+ ctx.Data["PageIsAdminUsers"] = true
+ ctx.Data["DefaultUserVisibilityMode"] = setting.Service.DefaultUserVisibilityMode
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+
+ sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{
+ IsActive: optional.Some(true),
+ })
+ if err != nil {
+ ctx.ServerError("auth.Sources", err)
+ return
+ }
+ ctx.Data["Sources"] = sources
+
+ ctx.Data["CanSendEmail"] = setting.MailService != nil
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplUserNew)
+ return
+ }
+
+ u := &user_model.User{
+ Name: form.UserName,
+ Email: form.Email,
+ Passwd: form.Password,
+ LoginType: auth.Plain,
+ }
+
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ Visibility: &form.Visibility,
+ }
+
+ if len(form.LoginType) > 0 {
+ fields := strings.Split(form.LoginType, "-")
+ if len(fields) == 2 {
+ lType, _ := strconv.ParseInt(fields[0], 10, 0)
+ u.LoginType = auth.Type(lType)
+ u.LoginSource, _ = strconv.ParseInt(fields[1], 10, 64)
+ u.LoginName = form.LoginName
+ }
+ }
+ if u.LoginType == auth.NoType || u.LoginType == auth.Plain {
+ if len(form.Password) < setting.MinPasswordLength {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplUserNew, &form)
+ return
+ }
+ if !password.IsComplexEnough(form.Password) {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(password.BuildComplexityError(ctx.Locale), tplUserNew, &form)
+ return
+ }
+ if err := password.IsPwned(ctx, form.Password); err != nil {
+ ctx.Data["Err_Password"] = true
+ errMsg := ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords")
+ if password.IsErrIsPwnedRequest(err) {
+ log.Error(err.Error())
+ errMsg = ctx.Tr("auth.password_pwned_err")
+ }
+ ctx.RenderWithErr(errMsg, tplUserNew, &form)
+ return
+ }
+ u.MustChangePassword = form.MustChangePassword
+ }
+
+ if err := user_model.AdminCreateUser(ctx, u, overwriteDefault); err != nil {
+ switch {
+ case user_model.IsErrUserAlreadyExist(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_been_taken"), tplUserNew, &form)
+ case user_model.IsErrEmailAlreadyUsed(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplUserNew, &form)
+ case user_model.IsErrEmailInvalid(err), user_model.IsErrEmailCharIsNotSupported(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserNew, &form)
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_reserved", err.(db.ErrNameReserved).Name), tplUserNew, &form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplUserNew, &form)
+ case db.IsErrNameCharsNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_chars_not_allowed", err.(db.ErrNameCharsNotAllowed).Name), tplUserNew, &form)
+ default:
+ ctx.ServerError("CreateUser", err)
+ }
+ return
+ }
+
+ if !user_model.IsEmailDomainAllowed(u.Email) {
+ ctx.Flash.Warning(ctx.Tr("form.email_domain_is_not_allowed", u.Email))
+ }
+
+ log.Trace("Account created by admin (%s): %s", ctx.Doer.Name, u.Name)
+
+ // Send email notification.
+ if form.SendNotify {
+ mailer.SendRegisterNotifyMail(u)
+ }
+
+ ctx.Flash.Success(ctx.Tr("admin.users.new_success", u.Name))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + strconv.FormatInt(u.ID, 10))
+}
+
+func prepareUserInfo(ctx *context.Context) *user_model.User {
+ u, err := user_model.GetUserByID(ctx, ctx.ParamsInt64(":userid"))
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Redirect(setting.AppSubURL + "/admin/users")
+ } else {
+ ctx.ServerError("GetUserByID", err)
+ }
+ return nil
+ }
+ ctx.Data["User"] = u
+
+ if u.LoginSource > 0 {
+ ctx.Data["LoginSource"], err = auth.GetSourceByID(ctx, u.LoginSource)
+ if err != nil {
+ ctx.ServerError("auth.GetSourceByID", err)
+ return nil
+ }
+ } else {
+ ctx.Data["LoginSource"] = &auth.Source{}
+ }
+
+ sources, err := db.Find[auth.Source](ctx, auth.FindSourcesOptions{})
+ if err != nil {
+ ctx.ServerError("auth.Sources", err)
+ return nil
+ }
+ ctx.Data["Sources"] = sources
+
+ hasTOTP, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
+ ctx.ServerError("auth.HasTwoFactorByUID", err)
+ return nil
+ }
+ hasWebAuthn, err := auth.HasWebAuthnRegistrationsByUID(ctx, u.ID)
+ if err != nil {
+ ctx.ServerError("auth.HasWebAuthnRegistrationsByUID", err)
+ return nil
+ }
+ ctx.Data["TwoFactorEnabled"] = hasTOTP || hasWebAuthn
+
+ return u
+}
+
+func ViewUser(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.users.details")
+ ctx.Data["PageIsAdminUsers"] = true
+ ctx.Data["DisableRegularOrgCreation"] = setting.Admin.DisableRegularOrgCreation
+ ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+
+ u := prepareUserInfo(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ repos, count, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: u.ID,
+ OrderBy: db.SearchOrderByAlphabetically,
+ Private: true,
+ Collaborate: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+
+ ctx.Data["Repos"] = repos
+ ctx.Data["ReposTotal"] = int(count)
+
+ emails, err := user_model.GetEmailAddresses(ctx, u.ID)
+ if err != nil {
+ ctx.ServerError("GetEmailAddresses", err)
+ return
+ }
+ ctx.Data["Emails"] = emails
+ ctx.Data["EmailsTotal"] = len(emails)
+
+ orgs, err := db.Find[org_model.Organization](ctx, org_model.FindOrgOptions{
+ ListOptions: db.ListOptionsAll,
+ UserID: u.ID,
+ IncludePrivate: true,
+ })
+ if err != nil {
+ ctx.ServerError("FindOrgs", err)
+ return
+ }
+
+ ctx.Data["Users"] = orgs // needed to be able to use explore/user_list template
+ ctx.Data["OrgsTotal"] = len(orgs)
+
+ ctx.HTML(http.StatusOK, tplUserView)
+}
+
+func editUserCommon(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("admin.users.edit_account")
+ ctx.Data["PageIsAdminUsers"] = true
+ ctx.Data["DisableRegularOrgCreation"] = setting.Admin.DisableRegularOrgCreation
+ ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+ ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
+}
+
+// EditUser shows the user editing page
+func EditUser(ctx *context.Context) {
+ editUserCommon(ctx)
+ prepareUserInfo(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplUserEdit)
+}
+
+// EditUserPost handles the user edit form submission
+func EditUserPost(ctx *context.Context) {
+ editUserCommon(ctx)
+ u := prepareUserInfo(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.AdminEditUserForm)
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplUserEdit)
+ return
+ }
+
+ if form.UserName != "" {
+ if err := user_service.RenameUser(ctx, u, form.UserName); err != nil {
+ switch {
+ case user_model.IsErrUserIsNotLocal(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_change_not_local_user"), tplUserEdit, &form)
+ case user_model.IsErrUserAlreadyExist(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_been_taken"), tplUserEdit, &form)
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_reserved", form.UserName), tplUserEdit, &form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_pattern_not_allowed", form.UserName), tplUserEdit, &form)
+ case db.IsErrNameCharsNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_chars_not_allowed", form.UserName), tplUserEdit, &form)
+ default:
+ ctx.ServerError("RenameUser", err)
+ }
+ return
+ }
+ }
+
+ authOpts := &user_service.UpdateAuthOptions{
+ Password: optional.FromNonDefault(form.Password),
+ LoginName: optional.Some(form.LoginName),
+ }
+
+	// never allow admins to prohibit their own login
+ if ctx.Doer.ID == u.ID {
+ authOpts.ProhibitLogin = optional.Some(false)
+ } else {
+ authOpts.ProhibitLogin = optional.Some(form.ProhibitLogin)
+ }
+
+ fields := strings.Split(form.LoginType, "-")
+ if len(fields) == 2 {
+ authSource, _ := strconv.ParseInt(fields[1], 10, 64)
+
+ authOpts.LoginSource = optional.Some(authSource)
+ }
+
+ if err := user_service.UpdateAuth(ctx, u, authOpts); err != nil {
+ switch {
+ case errors.Is(err, password.ErrMinLength):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplUserEdit, &form)
+ case errors.Is(err, password.ErrComplexity):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(password.BuildComplexityError(ctx.Locale), tplUserEdit, &form)
+ case errors.Is(err, password.ErrIsPwned):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords"), tplUserEdit, &form)
+ case password.IsErrIsPwnedRequest(err):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned_err"), tplUserEdit, &form)
+ default:
+ ctx.ServerError("UpdateUser", err)
+ }
+ return
+ }
+
+ if form.Email != "" {
+ if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, u, form.Email); err != nil {
+ switch {
+ case user_model.IsErrEmailCharIsNotSupported(err), user_model.IsErrEmailInvalid(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserEdit, &form)
+ case user_model.IsErrEmailAlreadyUsed(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplUserEdit, &form)
+ default:
+ ctx.ServerError("AddOrSetPrimaryEmailAddress", err)
+ }
+ return
+ }
+ if !user_model.IsEmailDomainAllowed(form.Email) {
+ ctx.Flash.Warning(ctx.Tr("form.email_domain_is_not_allowed", form.Email))
+ }
+ }
+
+ opts := &user_service.UpdateOptions{
+ FullName: optional.Some(form.FullName),
+ Website: optional.Some(form.Website),
+ Location: optional.Some(form.Location),
+ Pronouns: optional.Some(form.Pronouns),
+ IsActive: optional.Some(form.Active),
+ IsAdmin: optional.Some(form.Admin),
+ AllowGitHook: optional.Some(form.AllowGitHook),
+ AllowImportLocal: optional.Some(form.AllowImportLocal),
+ MaxRepoCreation: optional.Some(form.MaxRepoCreation),
+ AllowCreateOrganization: optional.Some(form.AllowCreateOrganization),
+ IsRestricted: optional.Some(form.Restricted),
+ Visibility: optional.Some(form.Visibility),
+ Language: optional.Some(form.Language),
+ }
+
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ if models.IsErrDeleteLastAdminUser(err) {
+ ctx.RenderWithErr(ctx.Tr("auth.last_admin"), tplUserEdit, &form)
+ } else {
+ ctx.ServerError("UpdateUser", err)
+ }
+ return
+ }
+ log.Trace("Account profile updated by admin (%s): %s", ctx.Doer.Name, u.Name)
+
+ if form.Reset2FA {
+ tf, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("auth.GetTwoFactorByUID", err)
+ return
+ } else if tf != nil {
+ if err := auth.DeleteTwoFactorByID(ctx, tf.ID, u.ID); err != nil {
+ ctx.ServerError("auth.DeleteTwoFactorByID", err)
+ return
+ }
+ }
+
+ wn, err := auth.GetWebAuthnCredentialsByUID(ctx, u.ID)
+ if err != nil {
+			ctx.ServerError("auth.GetWebAuthnCredentialsByUID", err)
+ return
+ }
+ for _, cred := range wn {
+ if _, err := auth.DeleteCredential(ctx, cred.ID, u.ID); err != nil {
+ ctx.ServerError("auth.DeleteCredential", err)
+ return
+ }
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("admin.users.update_profile_success"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+}
+
+// DeleteUser handles the request to delete a user
+func DeleteUser(ctx *context.Context) {
+ u, err := user_model.GetUserByID(ctx, ctx.ParamsInt64(":userid"))
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+	// admins should not delete themselves
+ if u.ID == ctx.Doer.ID {
+ ctx.Flash.Error(ctx.Tr("admin.users.cannot_delete_self"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+ return
+ }
+
+ if err = user_service.DeleteUser(ctx, u, ctx.FormBool("purge")); err != nil {
+ switch {
+ case models.IsErrUserOwnRepos(err):
+ ctx.Flash.Error(ctx.Tr("admin.users.still_own_repo"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+ case models.IsErrUserHasOrgs(err):
+ ctx.Flash.Error(ctx.Tr("admin.users.still_has_org"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+ case models.IsErrUserOwnPackages(err):
+ ctx.Flash.Error(ctx.Tr("admin.users.still_own_packages"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+ case models.IsErrDeleteLastAdminUser(err):
+ ctx.Flash.Error(ctx.Tr("auth.last_admin"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + url.PathEscape(ctx.Params(":userid")))
+ default:
+ ctx.ServerError("DeleteUser", err)
+ }
+ return
+ }
+ log.Trace("Account deleted by admin (%s): %s", ctx.Doer.Name, u.Name)
+
+ ctx.Flash.Success(ctx.Tr("admin.users.deletion_success"))
+ ctx.Redirect(setting.AppSubURL + "/admin/users")
+}
+
+// AvatarPost handles the request to change a user's avatar
+func AvatarPost(ctx *context.Context) {
+ u := prepareUserInfo(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.AvatarForm)
+ if err := user_setting.UpdateAvatarSetting(ctx, form, u); err != nil {
+ ctx.Flash.Error(err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.update_user_avatar_success"))
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/admin/users/" + strconv.FormatInt(u.ID, 10))
+}
+
+// DeleteAvatar deletes the user's avatar
+func DeleteAvatar(ctx *context.Context) {
+ u := prepareUserInfo(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := user_service.DeleteAvatar(ctx, u); err != nil {
+ ctx.Flash.Error(err.Error())
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/admin/users/" + strconv.FormatInt(u.ID, 10))
+}
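
Both NewUserPost and EditUserPost encode the chosen authentication source as a "loginType-sourceID" string (the form default is "0-0") and split it apart again on submit. A standalone sketch of that decoding; parseLoginType is an illustrative helper, not a Forgejo function:

    package main

    import (
        "fmt"
        "strconv"
        "strings"
    )

    // parseLoginType decodes the "loginType-sourceID" value used by the admin user forms.
    func parseLoginType(v string) (int64, int64, bool) {
        fields := strings.Split(v, "-")
        if len(fields) != 2 {
            return 0, 0, false
        }
        loginType, err1 := strconv.ParseInt(fields[0], 10, 0)
        sourceID, err2 := strconv.ParseInt(fields[1], 10, 64)
        if err1 != nil || err2 != nil {
            return 0, 0, false
        }
        return loginType, sourceID, true
    }

    func main() {
        t, s, ok := parseLoginType("2-5")
        fmt.Println(t, s, ok) // 2 5 true
    }
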
diff --git a/routers/web/admin/users_test.go b/routers/web/admin/users_test.go
new file mode 100644
index 0000000..ae3b130
--- /dev/null
+++ b/routers/web/admin/users_test.go
@@ -0,0 +1,200 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package admin
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewUserPost_MustChangePassword(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "admin/users/new")
+
+ u := unittest.AssertExistsAndLoadBean(t, &user_model.User{
+ IsAdmin: true,
+ ID: 2,
+ })
+
+ ctx.Doer = u
+
+ username := "gitea"
+ email := "gitea@gitea.io"
+
+ form := forms.AdminCreateUserForm{
+ LoginType: "local",
+ LoginName: "local",
+ UserName: username,
+ Email: email,
+ Password: "abc123ABC!=$",
+ SendNotify: false,
+ MustChangePassword: true,
+ }
+
+ web.SetForm(ctx, &form)
+ NewUserPost(ctx)
+
+ assert.NotEmpty(t, ctx.Flash.SuccessMsg)
+
+ u, err := user_model.GetUserByName(ctx, username)
+
+ require.NoError(t, err)
+ assert.Equal(t, username, u.Name)
+ assert.Equal(t, email, u.Email)
+ assert.True(t, u.MustChangePassword)
+}
+
+func TestNewUserPost_MustChangePasswordFalse(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "admin/users/new")
+
+ u := unittest.AssertExistsAndLoadBean(t, &user_model.User{
+ IsAdmin: true,
+ ID: 2,
+ })
+
+ ctx.Doer = u
+
+ username := "gitea"
+ email := "gitea@gitea.io"
+
+ form := forms.AdminCreateUserForm{
+ LoginType: "local",
+ LoginName: "local",
+ UserName: username,
+ Email: email,
+ Password: "abc123ABC!=$",
+ SendNotify: false,
+ MustChangePassword: false,
+ }
+
+ web.SetForm(ctx, &form)
+ NewUserPost(ctx)
+
+ assert.NotEmpty(t, ctx.Flash.SuccessMsg)
+
+ u, err := user_model.GetUserByName(ctx, username)
+
+ require.NoError(t, err)
+ assert.Equal(t, username, u.Name)
+ assert.Equal(t, email, u.Email)
+ assert.False(t, u.MustChangePassword)
+}
+
+func TestNewUserPost_InvalidEmail(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "admin/users/new")
+
+ u := unittest.AssertExistsAndLoadBean(t, &user_model.User{
+ IsAdmin: true,
+ ID: 2,
+ })
+
+ ctx.Doer = u
+
+ username := "gitea"
+ email := "gitea@gitea.io\r\n"
+
+ form := forms.AdminCreateUserForm{
+ LoginType: "local",
+ LoginName: "local",
+ UserName: username,
+ Email: email,
+ Password: "abc123ABC!=$",
+ SendNotify: false,
+ MustChangePassword: false,
+ }
+
+ web.SetForm(ctx, &form)
+ NewUserPost(ctx)
+
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestNewUserPost_VisibilityDefaultPublic(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "admin/users/new")
+
+ u := unittest.AssertExistsAndLoadBean(t, &user_model.User{
+ IsAdmin: true,
+ ID: 2,
+ })
+
+ ctx.Doer = u
+
+ username := "gitea"
+ email := "gitea@gitea.io"
+
+ form := forms.AdminCreateUserForm{
+ LoginType: "local",
+ LoginName: "local",
+ UserName: username,
+ Email: email,
+ Password: "abc123ABC!=$",
+ SendNotify: false,
+ MustChangePassword: false,
+ }
+
+ web.SetForm(ctx, &form)
+ NewUserPost(ctx)
+
+ assert.NotEmpty(t, ctx.Flash.SuccessMsg)
+
+ u, err := user_model.GetUserByName(ctx, username)
+
+ require.NoError(t, err)
+ assert.Equal(t, username, u.Name)
+ assert.Equal(t, email, u.Email)
+ // As default user visibility
+ assert.Equal(t, setting.Service.DefaultUserVisibilityMode, u.Visibility)
+}
+
+func TestNewUserPost_VisibilityPrivate(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "admin/users/new")
+
+ u := unittest.AssertExistsAndLoadBean(t, &user_model.User{
+ IsAdmin: true,
+ ID: 2,
+ })
+
+ ctx.Doer = u
+
+ username := "gitea"
+ email := "gitea@gitea.io"
+
+ form := forms.AdminCreateUserForm{
+ LoginType: "local",
+ LoginName: "local",
+ UserName: username,
+ Email: email,
+ Password: "abc123ABC!=$",
+ SendNotify: false,
+ MustChangePassword: false,
+ Visibility: api.VisibleTypePrivate,
+ }
+
+ web.SetForm(ctx, &form)
+ NewUserPost(ctx)
+
+ assert.NotEmpty(t, ctx.Flash.SuccessMsg)
+
+ u, err := user_model.GetUserByName(ctx, username)
+
+ require.NoError(t, err)
+ assert.Equal(t, username, u.Name)
+ assert.Equal(t, email, u.Email)
+ // As default user visibility
+	// Visibility was explicitly set to private
+}
diff --git a/routers/web/auth/2fa.go b/routers/web/auth/2fa.go
new file mode 100644
index 0000000..f93177b
--- /dev/null
+++ b/routers/web/auth/2fa.go
@@ -0,0 +1,163 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/externalaccount"
+ "code.gitea.io/gitea/services/forms"
+)
+
+var (
+ tplTwofa base.TplName = "user/auth/twofa"
+ tplTwofaScratch base.TplName = "user/auth/twofa_scratch"
+)
+
+// TwoFactor shows the user a two-factor authentication page.
+func TwoFactor(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("twofa")
+
+ if CheckAutoLogin(ctx) {
+ return
+ }
+
+ // Ensure user is in a 2FA session.
+ if ctx.Session.Get("twofaUid") == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in 2FA session"))
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplTwofa)
+}
+
+// TwoFactorPost validates a user's two-factor authentication token.
+func TwoFactorPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.TwoFactorAuthForm)
+ ctx.Data["Title"] = ctx.Tr("twofa")
+
+ // Ensure user is in a 2FA session.
+ idSess := ctx.Session.Get("twofaUid")
+ if idSess == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in 2FA session"))
+ return
+ }
+
+ id := idSess.(int64)
+ twofa, err := auth.GetTwoFactorByUID(ctx, id)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ // Validate the passcode with the stored TOTP secret.
+ ok, err := twofa.ValidateTOTP(form.Passcode)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ if ok && twofa.LastUsedPasscode != form.Passcode {
+ remember := ctx.Session.Get("twofaRemember").(bool)
+ u, err := user_model.GetUserByID(ctx, id)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ if ctx.Session.Get("linkAccount") != nil {
+ err = externalaccount.LinkAccountFromStore(ctx, ctx.Session, u)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ }
+
+ twofa.LastUsedPasscode = form.Passcode
+ if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ handleSignIn(ctx, u, remember)
+ return
+ }
+
+ ctx.RenderWithErr(ctx.Tr("auth.twofa_passcode_incorrect"), tplTwofa, forms.TwoFactorAuthForm{})
+}
+
+// TwoFactorScratch shows the scratch code form for two-factor authentication.
+func TwoFactorScratch(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("twofa_scratch")
+
+ if CheckAutoLogin(ctx) {
+ return
+ }
+
+ // Ensure user is in a 2FA session.
+ if ctx.Session.Get("twofaUid") == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in 2FA session"))
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplTwofaScratch)
+}
+
+// TwoFactorScratchPost validates and invalidates a user's two-factor scratch token.
+func TwoFactorScratchPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.TwoFactorScratchAuthForm)
+ ctx.Data["Title"] = ctx.Tr("twofa_scratch")
+
+ // Ensure user is in a 2FA session.
+ idSess := ctx.Session.Get("twofaUid")
+ if idSess == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in 2FA session"))
+ return
+ }
+
+ id := idSess.(int64)
+ twofa, err := auth.GetTwoFactorByUID(ctx, id)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+	// Validate the scratch token.
+ if twofa.VerifyScratchToken(form.Token) {
+ // Invalidate the scratch token.
+ _, err = twofa.GenerateScratchToken()
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ remember := ctx.Session.Get("twofaRemember").(bool)
+ u, err := user_model.GetUserByID(ctx, id)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ handleSignInFull(ctx, u, remember, false)
+ if ctx.Written() {
+ return
+ }
+ ctx.Flash.Info(ctx.Tr("auth.twofa_scratch_used"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+ return
+ }
+
+ ctx.RenderWithErr(ctx.Tr("auth.twofa_scratch_token_incorrect"), tplTwofaScratch, forms.TwoFactorScratchAuthForm{})
+}
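
TwoFactorPost and TwoFactorScratchPost read "twofaRemember" with a plain type assertion, which is safe only because the earlier "twofaUid" check guarantees the 2FA session keys were set. A defensive alternative, not what the handlers do, is the comma-ok assertion; a toy sketch with a plain map standing in for the session store:

    package main

    import "fmt"

    // getBool reads a session-like map with the comma-ok type assertion, so a missing
    // or mistyped key yields false instead of panicking.
    func getBool(session map[string]any, key string) bool {
        v, ok := session[key].(bool)
        return ok && v
    }

    func main() {
        session := map[string]any{"twofaRemember": true}
        fmt.Println(getBool(session, "twofaRemember")) // true
        fmt.Println(getBool(session, "missing"))       // false
    }
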
diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go
new file mode 100644
index 0000000..bb20309
--- /dev/null
+++ b/routers/web/auth/auth.go
@@ -0,0 +1,881 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "crypto/subtle"
+ "encoding/hex"
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/eventsource"
+ "code.gitea.io/gitea/modules/httplib"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/session"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ auth_service "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/externalaccount"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+ notify_service "code.gitea.io/gitea/services/notify"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "github.com/markbates/goth"
+)
+
+const (
+ // tplSignIn template for sign in page
+ tplSignIn base.TplName = "user/auth/signin"
+ // tplSignUp template path for sign up page
+ tplSignUp base.TplName = "user/auth/signup"
+ // TplActivate template path for activate user
+ TplActivate base.TplName = "user/auth/activate"
+)
+
+// autoSignIn reads the remember-me cookie and tries to auto-login.
+func autoSignIn(ctx *context.Context) (bool, error) {
+ isSucceed := false
+ defer func() {
+ if !isSucceed {
+ ctx.DeleteSiteCookie(setting.CookieRememberName)
+ }
+ }()
+
+ authCookie := ctx.GetSiteCookie(setting.CookieRememberName)
+ if len(authCookie) == 0 {
+ return false, nil
+ }
+
+ lookupKey, validator, found := strings.Cut(authCookie, ":")
+ if !found {
+ return false, nil
+ }
+
+ authToken, err := auth.FindAuthToken(ctx, lookupKey)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ return false, nil
+ }
+ return false, err
+ }
+
+ if authToken.IsExpired() {
+ err = auth.DeleteAuthToken(ctx, authToken)
+ return false, err
+ }
+
+ rawValidator, err := hex.DecodeString(validator)
+ if err != nil {
+ return false, err
+ }
+
+ if subtle.ConstantTimeCompare([]byte(authToken.HashedValidator), []byte(auth.HashValidator(rawValidator))) == 0 {
+ return false, nil
+ }
+
+ u, err := user_model.GetUserByID(ctx, authToken.UID)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ return false, fmt.Errorf("GetUserByID: %w", err)
+ }
+ return false, nil
+ }
+
+ isSucceed = true
+
+ if err := updateSession(ctx, nil, map[string]any{
+ // Set session IDs
+ "uid": u.ID,
+ }); err != nil {
+ return false, fmt.Errorf("unable to updateSession: %w", err)
+ }
+
+ if err := resetLocale(ctx, u); err != nil {
+ return false, err
+ }
+
+ ctx.Csrf.DeleteCookie(ctx)
+ return true, nil
+}
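
autoSignIn splits the remember-me cookie into a lookup key and a validator, and compares a hash of the validator against the stored one in constant time, so neither a leaked token table nor a timing probe yields a usable cookie. A standalone sketch of that check; it assumes the hash is a plain SHA-256 hex digest, which may differ from auth.HashValidator:

    package main

    import (
        "crypto/sha256"
        "crypto/subtle"
        "encoding/hex"
        "fmt"
        "strings"
    )

    // checkRememberCookie sketches the lookupKey:validator scheme: the server stores
    // only a hash of the validator, so a database leak does not reveal usable cookies.
    func checkRememberCookie(cookie, storedHashedValidator string) bool {
        _, validator, found := strings.Cut(cookie, ":")
        if !found {
            return false
        }
        raw, err := hex.DecodeString(validator)
        if err != nil {
            return false
        }
        sum := sha256.Sum256(raw)
        hashed := hex.EncodeToString(sum[:])
        // Constant-time comparison avoids leaking how many characters matched.
        return subtle.ConstantTimeCompare([]byte(hashed), []byte(storedHashedValidator)) == 1
    }

    func main() {
        raw := []byte{0x01, 0x02, 0x03}
        sum := sha256.Sum256(raw)
        stored := hex.EncodeToString(sum[:])
        cookie := "lookup-key:" + hex.EncodeToString(raw)
        fmt.Println(checkRememberCookie(cookie, stored)) // true
    }
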
+
+func resetLocale(ctx *context.Context, u *user_model.User) error {
+ // Language setting of the user overwrites the one previously set
+ // If the user does not have a locale set, we save the current one.
+ if u.Language == "" {
+ opts := &user_service.UpdateOptions{
+ Language: optional.Some(ctx.Locale.Language()),
+ }
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ return err
+ }
+ }
+
+ middleware.SetLocaleCookie(ctx.Resp, u.Language, 0)
+
+ if ctx.Locale.Language() != u.Language {
+ ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req)
+ }
+
+ return nil
+}
+
+func RedirectAfterLogin(ctx *context.Context) {
+ redirectTo := ctx.FormString("redirect_to")
+ if redirectTo == "" {
+ redirectTo = ctx.GetSiteCookie("redirect_to")
+ }
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ nextRedirectTo := setting.AppSubURL + string(setting.LandingPageURL)
+ if setting.LandingPageURL == setting.LandingPageLogin {
+ nextRedirectTo = setting.AppSubURL + "/" // do not cycle-redirect to the login page
+ }
+ ctx.RedirectToFirst(redirectTo, nextRedirectTo)
+}
+
+func CheckAutoLogin(ctx *context.Context) bool {
+ isSucceed, err := autoSignIn(ctx) // try to auto-login
+ if err != nil {
+ ctx.ServerError("autoSignIn", err)
+ return true
+ }
+
+ redirectTo := ctx.FormString("redirect_to")
+ if len(redirectTo) > 0 {
+ middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
+ }
+
+ if isSucceed {
+ RedirectAfterLogin(ctx)
+ return true
+ }
+
+ return false
+}
+
+// SignIn render sign in page
+func SignIn(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+
+ if CheckAutoLogin(ctx) {
+ return
+ }
+
+ if ctx.IsSigned {
+ RedirectAfterLogin(ctx)
+ return
+ }
+
+ oauth2Providers, err := oauth2.GetOAuth2Providers(ctx, optional.Some(true))
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ ctx.Data["OAuth2Providers"] = oauth2Providers
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsLogin"] = true
+ ctx.Data["EnableSSPI"] = auth.IsSSPIEnabled(ctx)
+
+ if setting.Service.EnableCaptcha && setting.Service.RequireCaptchaForLogin {
+ context.SetCaptchaData(ctx)
+ }
+
+ ctx.HTML(http.StatusOK, tplSignIn)
+}
+
+// SignInPost response for sign in request
+func SignInPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+
+ oauth2Providers, err := oauth2.GetOAuth2Providers(ctx, optional.Some(true))
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ ctx.Data["OAuth2Providers"] = oauth2Providers
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsLogin"] = true
+ ctx.Data["EnableSSPI"] = auth.IsSSPIEnabled(ctx)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSignIn)
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.SignInForm)
+
+ if setting.Service.EnableCaptcha && setting.Service.RequireCaptchaForLogin {
+ context.SetCaptchaData(ctx)
+
+ context.VerifyCaptcha(ctx, tplSignIn, form)
+ if ctx.Written() {
+ return
+ }
+ }
+
+ u, source, err := auth_service.UserSignIn(ctx, form.UserName, form.Password)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) || errors.Is(err, util.ErrInvalidArgument) {
+ ctx.RenderWithErr(ctx.Tr("form.username_password_incorrect"), tplSignIn, &form)
+ log.Warn("Failed authentication attempt for %s from %s: %v", form.UserName, ctx.RemoteAddr(), err)
+ } else if user_model.IsErrEmailAlreadyUsed(err) {
+ ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplSignIn, &form)
+ log.Warn("Failed authentication attempt for %s from %s: %v", form.UserName, ctx.RemoteAddr(), err)
+ } else if user_model.IsErrUserProhibitLogin(err) {
+ log.Warn("Failed authentication attempt for %s from %s: %v", form.UserName, ctx.RemoteAddr(), err)
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ } else if user_model.IsErrUserInactive(err) {
+ if setting.Service.RegisterEmailConfirm {
+ ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
+ ctx.HTML(http.StatusOK, TplActivate)
+ } else {
+ log.Warn("Failed authentication attempt for %s from %s: %v", form.UserName, ctx.RemoteAddr(), err)
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ }
+ } else {
+ ctx.ServerError("UserSignIn", err)
+ }
+ return
+ }
+
+ // Now handle 2FA:
+
+ // First of all if the source can skip local two fa we're done
+	// First of all, if the source can skip local 2FA, we're done
+ handleSignIn(ctx, u, form.Remember)
+ return
+ }
+
+ // If this user is enrolled in 2FA TOTP, we can't sign the user in just yet.
+ // Instead, redirect them to the 2FA authentication page.
+ hasTOTPtwofa, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ // Check if the user has webauthn registration
+ hasWebAuthnTwofa, err := auth.HasWebAuthnRegistrationsByUID(ctx, u.ID)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ if !hasTOTPtwofa && !hasWebAuthnTwofa {
+		// No two-factor auth configured, so we can sign the user in
+ handleSignIn(ctx, u, form.Remember)
+ return
+ }
+
+ updates := map[string]any{
+ // User will need to use 2FA TOTP or WebAuthn, save data
+ "twofaUid": u.ID,
+ "twofaRemember": form.Remember,
+ }
+ if hasTOTPtwofa {
+		// The user is enrolled in TOTP, record that
+ updates["totpEnrolled"] = u.ID
+ }
+ if err := updateSession(ctx, nil, updates); err != nil {
+ ctx.ServerError("UserSignIn: Unable to update session", err)
+ return
+ }
+
+ // If we have WebAuthn redirect there first
+ if hasWebAuthnTwofa {
+ ctx.Redirect(setting.AppSubURL + "/user/webauthn")
+ return
+ }
+
+ // Fallback to 2FA
+ ctx.Redirect(setting.AppSubURL + "/user/two_factor")
+}
+
+// This handles the final part of the sign-in process of the user.
+func handleSignIn(ctx *context.Context, u *user_model.User, remember bool) {
+ redirect := handleSignInFull(ctx, u, remember, true)
+ if ctx.Written() {
+ return
+ }
+ ctx.Redirect(redirect)
+}
+
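+// handleSignInFull sets the long-term auth cookie when "remember" is requested,
+// rewrites the session for the signed-in user, syncs the locale, resets the CSRF
+// cookie, records the last login and returns the URL to redirect to afterwards.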
+func handleSignInFull(ctx *context.Context, u *user_model.User, remember, obeyRedirect bool) string {
+ if remember {
+ if err := ctx.SetLTACookie(u); err != nil {
+ ctx.ServerError("GenerateAuthToken", err)
+ return setting.AppSubURL + "/"
+ }
+ }
+
+ if err := updateSession(ctx, []string{
+ // Delete the openid, 2fa and linkaccount data
+ "openid_verified_uri",
+ "openid_signin_remember",
+ "openid_determined_email",
+ "openid_determined_username",
+ "twofaUid",
+ "twofaRemember",
+ "linkAccount",
+ }, map[string]any{
+ "uid": u.ID,
+ }); err != nil {
+ ctx.ServerError("RegenerateSession", err)
+ return setting.AppSubURL + "/"
+ }
+
+ // The user's language setting overrides whatever locale was used so far.
+ // If the user does not have a locale set yet, save the current one for them.
+ if u.Language == "" {
+ opts := &user_service.UpdateOptions{
+ Language: optional.Some(ctx.Locale.Language()),
+ }
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ ctx.ServerError("UpdateUser Language", fmt.Errorf("error updating user language [user: %d, locale: %s]: %w", u.ID, ctx.Locale.Language(), err))
+ return setting.AppSubURL + "/"
+ }
+ }
+
+ middleware.SetLocaleCookie(ctx.Resp, u.Language, 0)
+
+ if ctx.Locale.Language() != u.Language {
+ ctx.Locale = middleware.Locale(ctx.Resp, ctx.Req)
+ }
+
+ // Clear whatever CSRF cookie is currently set and force a new one to be generated
+ ctx.Csrf.DeleteCookie(ctx)
+
+ // Register last login
+ if err := user_service.UpdateUser(ctx, u, &user_service.UpdateOptions{SetLastLogin: true}); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return setting.AppSubURL + "/"
+ }
+
+ redirectTo := ctx.GetSiteCookie("redirect_to")
+ if redirectTo != "" {
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ }
+ if obeyRedirect {
+ return ctx.RedirectToFirst(redirectTo)
+ }
+ if !httplib.IsRiskyRedirectURL(redirectTo) {
+ return redirectTo
+ }
+ return setting.AppSubURL + "/"
+}
+
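+// getUserName derives the username for a new OAuth2 user from the goth.User,
+// following the configured username source (email local-part, nickname or
+// provider user id).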
+func getUserName(gothUser *goth.User) (string, error) {
+ switch setting.OAuth2Client.Username {
+ case setting.OAuth2UsernameEmail:
+ return user_model.NormalizeUserName(strings.Split(gothUser.Email, "@")[0])
+ case setting.OAuth2UsernameNickname:
+ return user_model.NormalizeUserName(gothUser.NickName)
+ default: // OAuth2UsernameUserid
+ return gothUser.UserID, nil
+ }
+}
+
+// HandleSignOut flushes and destroys the session and removes the auth-related cookies
+func HandleSignOut(ctx *context.Context) {
+ _ = ctx.Session.Flush()
+ _ = ctx.Session.Destroy(ctx.Resp, ctx.Req)
+ ctx.DeleteSiteCookie(setting.CookieRememberName)
+ ctx.Csrf.DeleteCookie(ctx)
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+}
+
+// SignOut signs the user out of their login session
+func SignOut(ctx *context.Context) {
+ if ctx.Doer != nil {
+ eventsource.GetManager().SendMessage(ctx.Doer.ID, &eventsource.Event{
+ Name: "logout",
+ Data: ctx.Session.ID(),
+ })
+ }
+ HandleSignOut(ctx)
+ ctx.JSONRedirect(setting.AppSubURL + "/")
+}
+
+// SignUp renders the register page
+func SignUp(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("sign_up")
+
+ ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/sign_up"
+
+ oauth2Providers, err := oauth2.GetOAuth2Providers(ctx, optional.Some(true))
+ if err != nil {
+ ctx.ServerError("UserSignUp", err)
+ return
+ }
+
+ ctx.Data["OAuth2Providers"] = oauth2Providers
+ context.SetCaptchaData(ctx)
+
+ ctx.Data["PageIsSignUp"] = true
+
+ // Show Disabled Registration message if DisableRegistration or AllowOnlyExternalRegistration options are true
+ ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration
+
+ redirectTo := ctx.FormString("redirect_to")
+ if len(redirectTo) > 0 {
+ middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
+ }
+
+ ctx.HTML(http.StatusOK, tplSignUp)
+}
+
+// SignUpPost handles the sign-up form submission
+func SignUpPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RegisterForm)
+ ctx.Data["Title"] = ctx.Tr("sign_up")
+
+ ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/sign_up"
+
+ oauth2Providers, err := oauth2.GetOAuth2Providers(ctx, optional.Some(true))
+ if err != nil {
+ ctx.ServerError("UserSignUp", err)
+ return
+ }
+
+ ctx.Data["OAuth2Providers"] = oauth2Providers
+ context.SetCaptchaData(ctx)
+
+ ctx.Data["PageIsSignUp"] = true
+
+ // Permission denied if DisableRegistration or AllowOnlyExternalRegistration options are true
+ if setting.Service.DisableRegistration || setting.Service.AllowOnlyExternalRegistration {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSignUp)
+ return
+ }
+
+ context.VerifyCaptcha(ctx, tplSignUp, form)
+ if ctx.Written() {
+ return
+ }
+
+ if !form.IsEmailDomainAllowed() {
+ ctx.RenderWithErr(ctx.Tr("auth.email_domain_blacklisted"), tplSignUp, &form)
+ return
+ }
+
+ if form.Password != form.Retype {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("form.password_not_match"), tplSignUp, &form)
+ return
+ }
+ if len(form.Password) < setting.MinPasswordLength {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplSignUp, &form)
+ return
+ }
+ if !password.IsComplexEnough(form.Password) {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(password.BuildComplexityError(ctx.Locale), tplSignUp, &form)
+ return
+ }
+ if err := password.IsPwned(ctx, form.Password); err != nil {
+ errMsg := ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords")
+ if password.IsErrIsPwnedRequest(err) {
+ log.Error(err.Error())
+ errMsg = ctx.Tr("auth.password_pwned_err")
+ }
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(errMsg, tplSignUp, &form)
+ return
+ }
+
+ u := &user_model.User{
+ Name: form.UserName,
+ Email: form.Email,
+ Passwd: form.Password,
+ }
+
+ if !createAndHandleCreatedUser(ctx, tplSignUp, form, u, nil, nil, false) {
+ // error already handled
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("auth.sign_up_successful"))
+ handleSignIn(ctx, u, false)
+}
+
+// createAndHandleCreatedUser calls createUserInContext and
+// then handleUserCreated.
+func createAndHandleCreatedUser(ctx *context.Context, tpl base.TplName, form any, u *user_model.User, overwrites *user_model.CreateUserOverwriteOptions, gothUser *goth.User, allowLink bool) bool {
+ if !createUserInContext(ctx, tpl, form, u, overwrites, gothUser, allowLink) {
+ return false
+ }
+ return handleUserCreated(ctx, u, gothUser)
+}
+
+// createUserInContext creates a user and handles errors within a given context.
+// Optionally a template can be specified.
+func createUserInContext(ctx *context.Context, tpl base.TplName, form any, u *user_model.User, overwrites *user_model.CreateUserOverwriteOptions, gothUser *goth.User, allowLink bool) (ok bool) {
+ if err := user_model.CreateUser(ctx, u, overwrites); err != nil {
+ if allowLink && (user_model.IsErrUserAlreadyExist(err) || user_model.IsErrEmailAlreadyUsed(err)) {
+ if setting.OAuth2Client.AccountLinking == setting.OAuth2AccountLinkingAuto {
+ user := &user_model.User{Name: u.Name}
+ hasUser, err := user_model.GetUser(ctx, user)
+ if !hasUser || err != nil {
+ user = &user_model.User{Email: u.Email}
+ hasUser, err = user_model.GetUser(ctx, user)
+ if !hasUser || err != nil {
+ ctx.ServerError("UserLinkAccount", err)
+ return false
+ }
+ }
+
+ // TODO: probably we should respect 'remember' user's choice...
+ // TODO: probably we should respect the user's 'remember' choice...
+ return false // user is already created here, all redirects are handled
+ } else if setting.OAuth2Client.AccountLinking == setting.OAuth2AccountLinkingLogin {
+ showLinkingLogin(ctx, *gothUser)
+ return false // user will be created only after linking login
+ }
+ }
+
+ // handle error without template
+ if len(tpl) == 0 {
+ ctx.ServerError("CreateUser", err)
+ return false
+ }
+
+ // handle error with template
+ switch {
+ case user_model.IsErrUserAlreadyExist(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_been_taken"), tpl, form)
+ case user_model.IsErrEmailAlreadyUsed(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tpl, form)
+ case user_model.IsErrEmailCharIsNotSupported(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form)
+ case user_model.IsErrEmailInvalid(err):
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form)
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_reserved", err.(db.ErrNameReserved).Name), tpl, form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form)
+ case db.IsErrNameCharsNotAllowed(err):
+ ctx.Data["Err_UserName"] = true
+ ctx.RenderWithErr(ctx.Tr("user.form.name_chars_not_allowed", err.(db.ErrNameCharsNotAllowed).Name), tpl, form)
+ default:
+ ctx.ServerError("CreateUser", err)
+ }
+ return false
+ }
+ log.Trace("Account created: %s", u.Name)
+ return true
+}
+
+// handleUserCreated does additional steps after a new user is created.
+// It auto-sets admin for the only user, updates the optional external user and
+// sends a confirmation email if required.
+func handleUserCreated(ctx *context.Context, u *user_model.User, gothUser *goth.User) (ok bool) {
+ // Auto-set admin for the only user.
+ if user_model.CountUsers(ctx, nil) == 1 {
+ opts := &user_service.UpdateOptions{
+ IsActive: optional.Some(true),
+ IsAdmin: optional.Some(true),
+ SetLastLogin: true,
+ }
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return false
+ }
+ }
+
+ notify_service.NewUserSignUp(ctx, u)
+ // update external user information
+ if gothUser != nil {
+ if err := externalaccount.EnsureLinkExternalToUser(ctx, u, *gothUser); err != nil {
+ log.Error("EnsureLinkExternalToUser failed: %v", err)
+ }
+ }
+
+ // Send confirmation email
+ if !u.IsActive && u.ID > 1 {
+ if setting.Service.RegisterManualConfirm {
+ ctx.Data["ManualActivationOnly"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return false
+ }
+
+ mailer.SendActivateAccountMail(ctx.Locale, u)
+
+ ctx.Data["IsSendRegisterMail"] = true
+ ctx.Data["Email"] = u.Email
+ ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)
+ ctx.HTML(http.StatusOK, TplActivate)
+
+ if err := ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
+ log.Error("Set cache(MailResendLimit) fail: %v", err)
+ }
+ return false
+ }
+
+ return true
+}
+
+// Activate renders the account activation page
+func Activate(ctx *context.Context) {
+ code := ctx.FormString("code")
+
+ if len(code) == 0 {
+ ctx.Data["IsActivatePage"] = true
+ if ctx.Doer == nil || ctx.Doer.IsActive {
+ ctx.NotFound("invalid user", nil)
+ return
+ }
+ // Resend confirmation email.
+ if setting.Service.RegisterEmailConfirm {
+ var cacheKey string
+ if ctx.Cache.IsExist("MailChangedJustNow_" + ctx.Doer.LowerName) {
+ cacheKey = "MailChangedLimit_"
+ if err := ctx.Cache.Delete("MailChangedJustNow_" + ctx.Doer.LowerName); err != nil {
+ log.Error("Delete cache(MailChangedJustNow) fail: %v", err)
+ }
+ } else {
+ cacheKey = "MailResendLimit_"
+ }
+ if ctx.Cache.IsExist(cacheKey + ctx.Doer.LowerName) {
+ ctx.Data["ResendLimited"] = true
+ } else {
+ ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)
+ mailer.SendActivateAccountMail(ctx.Locale, ctx.Doer)
+
+ if err := ctx.Cache.Put(cacheKey+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+ log.Error("Set cache(MailResendLimit) fail: %v", err)
+ }
+ }
+ } else {
+ ctx.Data["ServiceNotEnabled"] = true
+ }
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+
+ user := user_model.VerifyUserActiveCode(ctx, code)
+ // if code is wrong
+ if user == nil {
+ ctx.Data["IsCodeInvalid"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+
+ // if account is local account, verify password
+ if user.LoginSource == 0 {
+ ctx.Data["Code"] = code
+ ctx.Data["NeedsPassword"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+
+ handleAccountActivation(ctx, user)
+}
+
+// ActivatePost handles account activation with password check
+func ActivatePost(ctx *context.Context) {
+ code := ctx.FormString("code")
+ if len(code) == 0 {
+ email := ctx.FormString("email")
+ if len(email) > 0 {
+ ctx.Data["IsActivatePage"] = true
+ if ctx.Doer == nil || ctx.Doer.IsActive {
+ ctx.NotFound("invalid user", nil)
+ return
+ }
+ // Change the primary email
+ if setting.Service.RegisterEmailConfirm {
+ if ctx.Cache.IsExist("MailChangeLimit_" + ctx.Doer.LowerName) {
+ ctx.Data["ResendLimited"] = true
+ } else {
+ ctx.Data["ActiveCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)
+ err := user_service.ReplaceInactivePrimaryEmail(ctx, ctx.Doer.Email, &user_model.EmailAddress{
+ UID: ctx.Doer.ID,
+ Email: email,
+ })
+ if err != nil {
+ ctx.Data["IsActivatePage"] = false
+ log.Error("Couldn't replace inactive primary email of user %d: %v", ctx.Doer.ID, err)
+ ctx.RenderWithErr(ctx.Tr("auth.change_unconfirmed_email_error", err), TplActivate, nil)
+ return
+ }
+ if err := ctx.Cache.Put("MailChangeLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+ log.Error("Set cache(MailChangeLimit) fail: %v", err)
+ }
+ if err := ctx.Cache.Put("MailChangedJustNow_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+ log.Error("Set cache(MailChangedJustNow) fail: %v", err)
+ }
+
+ // Confirmation mail will be re-sent after the redirect to `/user/activate` below.
+ }
+ } else {
+ ctx.Data["ServiceNotEnabled"] = true
+ }
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/activate")
+ return
+ }
+
+ user := user_model.VerifyUserActiveCode(ctx, code)
+ // if code is wrong
+ if user == nil {
+ ctx.Data["IsCodeInvalid"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+
+ // if account is local account, verify password
+ if user.LoginSource == 0 {
+ password := ctx.FormString("password")
+ if len(password) == 0 {
+ ctx.Data["Code"] = code
+ ctx.Data["NeedsPassword"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+ if !user.ValidatePassword(password) {
+ ctx.Data["IsPasswordInvalid"] = true
+ ctx.HTML(http.StatusOK, TplActivate)
+ return
+ }
+ }
+
+ handleAccountActivation(ctx, user)
+}
+
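+// handleAccountActivation marks the user as active, regenerates the user salt,
+// activates the primary email, binds the session to the user and redirects to
+// the stored "redirect_to" target or the home page.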
+func handleAccountActivation(ctx *context.Context, user *user_model.User) {
+ user.IsActive = true
+ var err error
+ if user.Rands, err = user_model.GetUserSalt(); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+ if err := user_model.UpdateUserCols(ctx, user, "is_active", "rands"); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("UpdateUserCols", err)
+ } else {
+ ctx.ServerError("UpdateUser", err)
+ }
+ return
+ }
+
+ if err := user_model.ActivateUserEmail(ctx, user.ID, user.Email, true); err != nil {
+ log.Error("Unable to activate email for user: %-v with email: %s: %v", user, user.Email, err)
+ ctx.ServerError("ActivateUserEmail", err)
+ return
+ }
+
+ log.Trace("User activated: %s", user.Name)
+
+ if err := updateSession(ctx, nil, map[string]any{
+ "uid": user.ID,
+ }); err != nil {
+ log.Error("Unable to regenerate session for user: %-v with email: %s: %v", user, user.Email, err)
+ ctx.ServerError("ActivateUserEmail", err)
+ return
+ }
+
+ if err := resetLocale(ctx, user); err != nil {
+ ctx.ServerError("resetLocale", err)
+ return
+ }
+
+ if err := user_service.UpdateUser(ctx, user, &user_service.UpdateOptions{SetLastLogin: true}); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("auth.account_activated"))
+ if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 {
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ ctx.RedirectToFirst(redirectTo)
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/")
+}
+
+// ActivateEmail renders the email activation page
+func ActivateEmail(ctx *context.Context) {
+ code := ctx.FormString("code")
+ emailStr := ctx.FormString("email")
+
+ // Verify code.
+ if email := user_model.VerifyActiveEmailCode(ctx, code, emailStr); email != nil {
+ if err := user_model.ActivateEmail(ctx, email); err != nil {
+ ctx.ServerError("ActivateEmail", err)
+ return
+ }
+
+ log.Trace("Email activated: %s", email.Email)
+ ctx.Flash.Success(ctx.Tr("settings.add_email_success"))
+
+ if u, err := user_model.GetUserByID(ctx, email.UID); err != nil {
+ log.Warn("GetUserByID(%d): %v", email.UID, err)
+ } else {
+ // Allow user to validate more emails
+ _ = ctx.Cache.Delete("MailResendLimit_" + u.LowerName)
+ }
+ }
+
+ // FIXME: e-mail verification does not require the user to be logged in,
+ // so this could be redirecting to the login page.
+ // Should users be logged in automatically here? (consider 2FA requirements, etc.)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+}
+
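+// updateSession regenerates the session ID, removes the given keys, applies the
+// given updates and persists the session.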
+func updateSession(ctx *context.Context, deletes []string, updates map[string]any) error {
+ if _, err := session.RegenerateSession(ctx.Resp, ctx.Req); err != nil {
+ return fmt.Errorf("regenerate session: %w", err)
+ }
+ sess := ctx.Session
+ sessID := sess.ID()
+ for _, k := range deletes {
+ if err := sess.Delete(k); err != nil {
+ return fmt.Errorf("delete %v in session[%s]: %w", k, sessID, err)
+ }
+ }
+ for k, v := range updates {
+ if err := sess.Set(k, v); err != nil {
+ return fmt.Errorf("set %v in session[%s]: %w", k, sessID, err)
+ }
+ }
+ if err := sess.Release(); err != nil {
+ return fmt.Errorf("store session[%s]: %w", sessID, err)
+ }
+ return nil
+}
diff --git a/routers/web/auth/auth_test.go b/routers/web/auth/auth_test.go
new file mode 100644
index 0000000..c6afbf8
--- /dev/null
+++ b/routers/web/auth/auth_test.go
@@ -0,0 +1,43 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "net/http"
+ "net/url"
+ "testing"
+
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
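+// TestUserLogin covers the sign-in page for anonymous and already signed-in
+// users, including redirect_to handling and the rejection of external redirects.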
+func TestUserLogin(t *testing.T) {
+ ctx, resp := contexttest.MockContext(t, "/user/login")
+ SignIn(ctx)
+ assert.Equal(t, http.StatusOK, resp.Code)
+
+ ctx, resp = contexttest.MockContext(t, "/user/login")
+ ctx.IsSigned = true
+ SignIn(ctx)
+ assert.Equal(t, http.StatusSeeOther, resp.Code)
+ assert.Equal(t, "/", test.RedirectURL(resp))
+
+ ctx, resp = contexttest.MockContext(t, "/user/login?redirect_to=/other")
+ ctx.IsSigned = true
+ SignIn(ctx)
+ assert.Equal(t, "/other", test.RedirectURL(resp))
+
+ ctx, resp = contexttest.MockContext(t, "/user/login")
+ ctx.Req.AddCookie(&http.Cookie{Name: "redirect_to", Value: "/other-cookie"})
+ ctx.IsSigned = true
+ SignIn(ctx)
+ assert.Equal(t, "/other-cookie", test.RedirectURL(resp))
+
+ ctx, resp = contexttest.MockContext(t, "/user/login?redirect_to="+url.QueryEscape("https://example.com"))
+ ctx.IsSigned = true
+ SignIn(ctx)
+ assert.Equal(t, "/", test.RedirectURL(resp))
+}
diff --git a/routers/web/auth/linkaccount.go b/routers/web/auth/linkaccount.go
new file mode 100644
index 0000000..9b0141c
--- /dev/null
+++ b/routers/web/auth/linkaccount.go
@@ -0,0 +1,308 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ auth_service "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/externalaccount"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/markbates/goth"
+)
+
+var tplLinkAccount base.TplName = "user/auth/link_account"
+
+// LinkAccount shows the page where the user can decide to login or create a new account
+func LinkAccount(ctx *context.Context) {
+ ctx.Data["DisablePassword"] = !setting.Service.RequireExternalRegistrationPassword || setting.Service.AllowOnlyExternalRegistration
+ ctx.Data["Title"] = ctx.Tr("link_account")
+ ctx.Data["LinkAccountMode"] = true
+ ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha && setting.Service.RequireExternalRegistrationCaptcha
+ ctx.Data["Captcha"] = context.GetImageCaptcha()
+ ctx.Data["CaptchaType"] = setting.Service.CaptchaType
+ ctx.Data["RecaptchaURL"] = setting.Service.RecaptchaURL
+ ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey
+ ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
+ ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
+ ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
+ ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
+ ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration
+ ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
+ ctx.Data["ShowRegistrationButton"] = false
+
+ // use this to set the right link into the signIn and signUp templates in the link_account template
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
+ ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/link_account_signup"
+
+ gothUser := ctx.Session.Get("linkAccountGothUser")
+ if gothUser == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in LinkAccount session"))
+ return
+ }
+
+ gu, _ := gothUser.(goth.User)
+ uname, err := getUserName(&gu)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ email := gu.Email
+ ctx.Data["user_name"] = uname
+ ctx.Data["email"] = email
+
+ if len(email) != 0 {
+ u, err := user_model.GetUserByEmail(ctx, email)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ if u != nil {
+ ctx.Data["user_exists"] = true
+ }
+ } else if len(uname) != 0 {
+ u, err := user_model.GetUserByName(ctx, uname)
+ if err != nil && !user_model.IsErrUserNotExist(err) {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ if u != nil {
+ ctx.Data["user_exists"] = true
+ }
+ }
+
+ ctx.HTML(http.StatusOK, tplLinkAccount)
+}
+
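+// handleSignInError renders the matching error page or form error for a failed
+// sign-in attempt during account linking.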
+func handleSignInError(ctx *context.Context, userName string, ptrForm any, tmpl base.TplName, invoker string, err error) {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.RenderWithErr(ctx.Tr("form.username_password_incorrect"), tmpl, ptrForm)
+ } else if errors.Is(err, util.ErrInvalidArgument) {
+ ctx.Data["user_exists"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_password_incorrect"), tmpl, ptrForm)
+ } else if user_model.IsErrUserProhibitLogin(err) {
+ ctx.Data["user_exists"] = true
+ log.Info("Failed authentication attempt for %s from %s: %v", userName, ctx.RemoteAddr(), err)
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ } else if user_model.IsErrUserInactive(err) {
+ ctx.Data["user_exists"] = true
+ if setting.Service.RegisterEmailConfirm {
+ ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
+ ctx.HTML(http.StatusOK, TplActivate)
+ } else {
+ log.Info("Failed authentication attempt for %s from %s: %v", userName, ctx.RemoteAddr(), err)
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ }
+ } else {
+ ctx.ServerError(invoker, err)
+ }
+}
+
+// LinkAccountPostSignIn handles linking an external account to an existing account via sign-in
+func LinkAccountPostSignIn(ctx *context.Context) {
+ signInForm := web.GetForm(ctx).(*forms.SignInForm)
+ ctx.Data["DisablePassword"] = !setting.Service.RequireExternalRegistrationPassword || setting.Service.AllowOnlyExternalRegistration
+ ctx.Data["Title"] = ctx.Tr("link_account")
+ ctx.Data["LinkAccountMode"] = true
+ ctx.Data["LinkAccountModeSignIn"] = true
+ ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha && setting.Service.RequireExternalRegistrationCaptcha
+ ctx.Data["RecaptchaURL"] = setting.Service.RecaptchaURL
+ ctx.Data["Captcha"] = context.GetImageCaptcha()
+ ctx.Data["CaptchaType"] = setting.Service.CaptchaType
+ ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey
+ ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
+ ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
+ ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
+ ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
+ ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration
+ ctx.Data["ShowRegistrationButton"] = false
+
+ // use this to set the right link into the signIn and signUp templates in the link_account template
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
+ ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/link_account_signup"
+
+ gothUser := ctx.Session.Get("linkAccountGothUser")
+ if gothUser == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in LinkAccount session"))
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplLinkAccount)
+ return
+ }
+
+ u, _, err := auth_service.UserSignIn(ctx, signInForm.UserName, signInForm.Password)
+ if err != nil {
+ handleSignInError(ctx, signInForm.UserName, &signInForm, tplLinkAccount, "UserLinkAccount", err)
+ return
+ }
+
+ linkAccount(ctx, u, gothUser.(goth.User), signInForm.Remember)
+}
+
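+// linkAccount links the external (goth) account to the given user and signs the
+// user in, detouring via the TOTP or WebAuthn page when the user has 2FA enrolled.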
+func linkAccount(ctx *context.Context, u *user_model.User, gothUser goth.User, remember bool) {
+ updateAvatarIfNeed(ctx, gothUser.AvatarURL, u)
+
+ // If this user is enrolled in 2FA, we can't sign the user in just yet.
+ // Instead, redirect them to the 2FA authentication page.
+ // We deliberately ignore the skip-local-2FA setting here because we are linking to an existing user
+ _, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil {
+ if !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("UserLinkAccount", err)
+ return
+ }
+
+ err = externalaccount.LinkAccountToUser(ctx, u, gothUser)
+ if err != nil {
+ ctx.ServerError("UserLinkAccount", err)
+ return
+ }
+
+ handleSignIn(ctx, u, remember)
+ return
+ }
+
+ if err := updateSession(ctx, nil, map[string]any{
+ // User needs to use 2FA, save data and redirect to 2FA page.
+ "twofaUid": u.ID,
+ "twofaRemember": remember,
+ "linkAccount": true,
+ }); err != nil {
+ ctx.ServerError("RegenerateSession", err)
+ return
+ }
+
+ // If WebAuthn is enrolled -> Redirect to WebAuthn instead
+ regs, err := auth.GetWebAuthnCredentialsByUID(ctx, u.ID)
+ if err == nil && len(regs) > 0 {
+ ctx.Redirect(setting.AppSubURL + "/user/webauthn")
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/two_factor")
+}
+
+// LinkAccountPostRegister handles creating a new account for an external account via sign-up
+func LinkAccountPostRegister(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RegisterForm)
+ // TODO Make insecure passwords optional for local accounts also,
+ // once email-based Second-Factor Auth is available
+ ctx.Data["DisablePassword"] = !setting.Service.RequireExternalRegistrationPassword || setting.Service.AllowOnlyExternalRegistration
+ ctx.Data["Title"] = ctx.Tr("link_account")
+ ctx.Data["LinkAccountMode"] = true
+ ctx.Data["LinkAccountModeRegister"] = true
+ ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha && setting.Service.RequireExternalRegistrationCaptcha
+ ctx.Data["RecaptchaURL"] = setting.Service.RecaptchaURL
+ ctx.Data["Captcha"] = context.GetImageCaptcha()
+ ctx.Data["CaptchaType"] = setting.Service.CaptchaType
+ ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey
+ ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
+ ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
+ ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
+ ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
+ ctx.Data["DisableRegistration"] = setting.Service.DisableRegistration
+ ctx.Data["ShowRegistrationButton"] = false
+
+ // use this to set the right link into the signIn and signUp templates in the link_account template
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/link_account_signin"
+ ctx.Data["SignUpLink"] = setting.AppSubURL + "/user/link_account_signup"
+
+ gothUserInterface := ctx.Session.Get("linkAccountGothUser")
+ if gothUserInterface == nil {
+ ctx.ServerError("UserSignUp", errors.New("not in LinkAccount session"))
+ return
+ }
+ gothUser, ok := gothUserInterface.(goth.User)
+ if !ok {
+ ctx.ServerError("UserSignUp", fmt.Errorf("session linkAccountGothUser type is %T but not goth.User", gothUserInterface))
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplLinkAccount)
+ return
+ }
+
+ if setting.Service.DisableRegistration || setting.Service.AllowOnlyInternalRegistration {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if setting.Service.EnableCaptcha && setting.Service.RequireExternalRegistrationCaptcha {
+ context.VerifyCaptcha(ctx, tplLinkAccount, form)
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if !form.IsEmailDomainAllowed() {
+ ctx.RenderWithErr(ctx.Tr("auth.email_domain_blacklisted"), tplLinkAccount, &form)
+ return
+ }
+
+ if setting.Service.AllowOnlyExternalRegistration || !setting.Service.RequireExternalRegistrationPassword {
+ // In user_model.User an empty password is classed as not set, so we set form.Password to empty.
+ // Eventually the database should be changed to indicate "Second Factor"-enabled accounts
+ // (accounts that do not introduce the security vulnerabilities of a password).
+ // If a user decides to circumvent second-factor security, and purposefully create a password,
+ // they can still do so using the "Recover Account" option.
+ form.Password = ""
+ } else {
+ if (len(strings.TrimSpace(form.Password)) > 0 || len(strings.TrimSpace(form.Retype)) > 0) && form.Password != form.Retype {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("form.password_not_match"), tplLinkAccount, &form)
+ return
+ }
+ if len(strings.TrimSpace(form.Password)) > 0 && len(form.Password) < setting.MinPasswordLength {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplLinkAccount, &form)
+ return
+ }
+ }
+
+ authSource, err := auth.GetActiveOAuth2SourceByName(ctx, gothUser.Provider)
+ if err != nil {
+ ctx.ServerError("CreateUser", err)
+ return
+ }
+
+ u := &user_model.User{
+ Name: form.UserName,
+ Email: form.Email,
+ Passwd: form.Password,
+ LoginType: auth.OAuth2,
+ LoginSource: authSource.ID,
+ LoginName: gothUser.UserID,
+ }
+
+ if !createAndHandleCreatedUser(ctx, tplLinkAccount, form, u, nil, &gothUser, false) {
+ // error already handled
+ return
+ }
+
+ source := authSource.Cfg.(*oauth2.Source)
+ if err := syncGroupsToTeams(ctx, source, &gothUser, u); err != nil {
+ ctx.ServerError("SyncGroupsToTeams", err)
+ return
+ }
+
+ handleSignIn(ctx, u, false)
+}
diff --git a/routers/web/auth/main_test.go b/routers/web/auth/main_test.go
new file mode 100644
index 0000000..b438e5d
--- /dev/null
+++ b/routers/web/auth/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/auth/oauth.go b/routers/web/auth/oauth.go
new file mode 100644
index 0000000..0626157
--- /dev/null
+++ b/routers/web/auth/oauth.go
@@ -0,0 +1,1422 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ go_context "context"
+ "crypto/sha256"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "html"
+ "html/template"
+ "io"
+ "net/http"
+ "net/url"
+ "sort"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ org_model "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ auth_module "code.gitea.io/gitea/modules/auth"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ auth_service "code.gitea.io/gitea/services/auth"
+ source_service "code.gitea.io/gitea/services/auth/source"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/externalaccount"
+ "code.gitea.io/gitea/services/forms"
+ remote_service "code.gitea.io/gitea/services/remote"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "gitea.com/go-chi/binding"
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/markbates/goth"
+ "github.com/markbates/goth/gothic"
+ "github.com/markbates/goth/providers/fitbit"
+ "github.com/markbates/goth/providers/openidConnect"
+ "github.com/markbates/goth/providers/zoom"
+ go_oauth2 "golang.org/x/oauth2"
+)
+
+const (
+ tplGrantAccess base.TplName = "user/auth/grant"
+ tplGrantError base.TplName = "user/auth/grant_error"
+)
+
+// TODO move error and responses to SDK or models
+
+// AuthorizeErrorCode represents an error code specified in RFC 6749
+// https://datatracker.ietf.org/doc/html/rfc6749#section-4.2.2.1
+type AuthorizeErrorCode string
+
+const (
+ // ErrorCodeInvalidRequest represents the according error in RFC 6749
+ ErrorCodeInvalidRequest AuthorizeErrorCode = "invalid_request"
+ // ErrorCodeUnauthorizedClient represents the according error in RFC 6749
+ ErrorCodeUnauthorizedClient AuthorizeErrorCode = "unauthorized_client"
+ // ErrorCodeAccessDenied represents the according error in RFC 6749
+ ErrorCodeAccessDenied AuthorizeErrorCode = "access_denied"
+ // ErrorCodeUnsupportedResponseType represents the according error in RFC 6749
+ ErrorCodeUnsupportedResponseType AuthorizeErrorCode = "unsupported_response_type"
+ // ErrorCodeInvalidScope represents the according error in RFC 6749
+ ErrorCodeInvalidScope AuthorizeErrorCode = "invalid_scope"
+ // ErrorCodeServerError represents the according error in RFC 6749
+ ErrorCodeServerError AuthorizeErrorCode = "server_error"
+ // ErrorCodeTemporaryUnavailable represents the according error in RFC 6749
+ ErrorCodeTemporaryUnavailable AuthorizeErrorCode = "temporarily_unavailable"
+)
+
+// AuthorizeError represents an error type specified in RFC 6749
+// https://datatracker.ietf.org/doc/html/rfc6749#section-4.2.2.1
+type AuthorizeError struct {
+ ErrorCode AuthorizeErrorCode `json:"error" form:"error"`
+ ErrorDescription string
+ State string
+}
+
+// Error returns the error message
+func (err AuthorizeError) Error() string {
+ return fmt.Sprintf("%s: %s", err.ErrorCode, err.ErrorDescription)
+}
+
+// AccessTokenErrorCode represents an error code specified in RFC 6749
+// https://datatracker.ietf.org/doc/html/rfc6749#section-5.2
+type AccessTokenErrorCode string
+
+const (
+ // AccessTokenErrorCodeInvalidRequest represents an error code specified in RFC 6749
+ AccessTokenErrorCodeInvalidRequest AccessTokenErrorCode = "invalid_request"
+ // AccessTokenErrorCodeInvalidClient represents an error code specified in RFC 6749
+ AccessTokenErrorCodeInvalidClient AccessTokenErrorCode = "invalid_client"
+ // AccessTokenErrorCodeInvalidGrant represents an error code specified in RFC 6749
+ AccessTokenErrorCodeInvalidGrant AccessTokenErrorCode = "invalid_grant"
+ // AccessTokenErrorCodeUnauthorizedClient represents an error code specified in RFC 6749
+ AccessTokenErrorCodeUnauthorizedClient AccessTokenErrorCode = "unauthorized_client"
+ // AccessTokenErrorCodeUnsupportedGrantType represents an error code specified in RFC 6749
+ AccessTokenErrorCodeUnsupportedGrantType AccessTokenErrorCode = "unsupported_grant_type"
+ // AccessTokenErrorCodeInvalidScope represents an error code specified in RFC 6749
+ AccessTokenErrorCodeInvalidScope AccessTokenErrorCode = "invalid_scope"
+)
+
+// AccessTokenError represents an error response specified in RFC 6749
+// https://datatracker.ietf.org/doc/html/rfc6749#section-5.2
+type AccessTokenError struct {
+ ErrorCode AccessTokenErrorCode `json:"error" form:"error"`
+ ErrorDescription string `json:"error_description"`
+}
+
+// Error returns the error message
+func (err AccessTokenError) Error() string {
+ return fmt.Sprintf("%s: %s", err.ErrorCode, err.ErrorDescription)
+}
+
+// errCallback represents an OAuth2 callback error
+type errCallback struct {
+ Code string
+ Description string
+}
+
+func (err errCallback) Error() string {
+ return err.Description
+}
+
+// TokenType specifies the kind of token
+type TokenType string
+
+const (
+ // TokenTypeBearer represents a token type specified in RFC 6749
+ TokenTypeBearer TokenType = "bearer"
+ // TokenTypeMAC represents a token type specified in RFC 6749
+ TokenTypeMAC TokenType = "mac"
+)
+
+// AccessTokenResponse represents a successful access token response
+// https://datatracker.ietf.org/doc/html/rfc6749#section-4.2.2
+type AccessTokenResponse struct {
+ AccessToken string `json:"access_token"`
+ TokenType TokenType `json:"token_type"`
+ ExpiresIn int64 `json:"expires_in"`
+ RefreshToken string `json:"refresh_token"`
+ IDToken string `json:"id_token,omitempty"`
+}
+
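+// newAccessTokenResponse issues the access and refresh tokens for the given
+// grant and, when the "openid" scope was granted, an OIDC id_token signed with
+// the client key.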
+func newAccessTokenResponse(ctx go_context.Context, grant *auth.OAuth2Grant, serverKey, clientKey oauth2.JWTSigningKey) (*AccessTokenResponse, *AccessTokenError) {
+ if setting.OAuth2.InvalidateRefreshTokens {
+ if err := grant.IncreaseCounter(ctx); err != nil {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidGrant,
+ ErrorDescription: "cannot increase the grant counter",
+ }
+ }
+ }
+ // generate access token to access the API
+ expirationDate := timeutil.TimeStampNow().Add(setting.OAuth2.AccessTokenExpirationTime)
+ accessToken := &oauth2.Token{
+ GrantID: grant.ID,
+ Type: oauth2.TypeAccessToken,
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(expirationDate.AsTime()),
+ },
+ }
+ signedAccessToken, err := accessToken.SignToken(serverKey)
+ if err != nil {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot sign token",
+ }
+ }
+
+ // generate refresh token to request an access token after it expired later
+ refreshExpirationDate := timeutil.TimeStampNow().Add(setting.OAuth2.RefreshTokenExpirationTime * 60 * 60).AsTime()
+ refreshToken := &oauth2.Token{
+ GrantID: grant.ID,
+ Counter: grant.Counter,
+ Type: oauth2.TypeRefreshToken,
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(refreshExpirationDate),
+ },
+ }
+ signedRefreshToken, err := refreshToken.SignToken(serverKey)
+ if err != nil {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot sign token",
+ }
+ }
+
+ // generate OpenID Connect id_token
+ signedIDToken := ""
+ if grant.ScopeContains("openid") {
+ app, err := auth.GetOAuth2ApplicationByID(ctx, grant.ApplicationID)
+ if err != nil {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot find application",
+ }
+ }
+ user, err := user_model.GetUserByID(ctx, grant.UserID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot find user",
+ }
+ }
+ log.Error("Error loading user: %v", err)
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "server error",
+ }
+ }
+
+ idToken := &oauth2.OIDCToken{
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(expirationDate.AsTime()),
+ Issuer: setting.AppURL,
+ Audience: []string{app.ClientID},
+ Subject: fmt.Sprint(grant.UserID),
+ },
+ Nonce: grant.Nonce,
+ }
+ if grant.ScopeContains("profile") {
+ idToken.Name = user.GetDisplayName()
+ idToken.PreferredUsername = user.Name
+ idToken.Profile = user.HTMLURL()
+ idToken.Picture = user.AvatarLink(ctx)
+ idToken.Website = user.Website
+ idToken.Locale = user.Language
+ idToken.UpdatedAt = user.UpdatedUnix
+ }
+ if grant.ScopeContains("email") {
+ idToken.Email = user.Email
+ idToken.EmailVerified = user.IsActive
+ }
+ if grant.ScopeContains("groups") {
+ onlyPublicGroups := ifOnlyPublicGroups(grant.Scope)
+
+ groups, err := getOAuthGroupsForUser(ctx, user, onlyPublicGroups)
+ if err != nil {
+ log.Error("Error getting groups: %v", err)
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "server error",
+ }
+ }
+ idToken.Groups = groups
+ }
+
+ signedIDToken, err = idToken.SignToken(clientKey)
+ if err != nil {
+ return nil, &AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot sign token",
+ }
+ }
+ }
+
+ return &AccessTokenResponse{
+ AccessToken: signedAccessToken,
+ TokenType: TokenTypeBearer,
+ ExpiresIn: setting.OAuth2.AccessTokenExpirationTime,
+ RefreshToken: signedRefreshToken,
+ IDToken: signedIDToken,
+ }, nil
+}
+
+type userInfoResponse struct {
+ Sub string `json:"sub"`
+ Name string `json:"name"`
+ Username string `json:"preferred_username"`
+ Email string `json:"email"`
+ Picture string `json:"picture"`
+ Groups []string `json:"groups,omitempty"`
+}
+
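+// ifOnlyPublicGroups reports whether the granted scopes only allow exposing
+// public organization memberships, i.e. none of the broader read scopes were granted.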
+func ifOnlyPublicGroups(scopes string) bool {
+ scopes = strings.ReplaceAll(scopes, ",", " ")
+ scopesList := strings.Fields(scopes)
+ for _, scope := range scopesList {
+ if scope == "all" || scope == "read:organization" || scope == "read:admin" {
+ return false
+ }
+ }
+ return true
+}
+
+// InfoOAuth handles requests to the userinfo endpoint
+func InfoOAuth(ctx *context.Context) {
+ if ctx.Doer == nil || ctx.Data["AuthedMethod"] != (&auth_service.OAuth2{}).Name() {
+ ctx.Resp.Header().Set("WWW-Authenticate", `Bearer realm=""`)
+ ctx.PlainText(http.StatusUnauthorized, "no valid authorization")
+ return
+ }
+
+ response := &userInfoResponse{
+ Sub: fmt.Sprint(ctx.Doer.ID),
+ Name: ctx.Doer.FullName,
+ Username: ctx.Doer.Name,
+ Email: ctx.Doer.Email,
+ Picture: ctx.Doer.AvatarLink(ctx),
+ }
+
+ var token string
+ if auHead := ctx.Req.Header.Get("Authorization"); auHead != "" {
+ auths := strings.Fields(auHead)
+ if len(auths) == 2 && (auths[0] == "token" || strings.ToLower(auths[0]) == "bearer") {
+ token = auths[1]
+ }
+ }
+
+ _, grantScopes := auth_service.CheckOAuthAccessToken(ctx, token)
+ onlyPublicGroups := ifOnlyPublicGroups(grantScopes)
+
+ groups, err := getOAuthGroupsForUser(ctx, ctx.Doer, onlyPublicGroups)
+ if err != nil {
+ ctx.ServerError("Oauth groups for user", err)
+ return
+ }
+ response.Groups = groups
+
+ ctx.JSON(http.StatusOK, response)
+}
+
+// getOAuthGroupsForUser returns a list of "org" and "org:team" strings
+// that the given user is a part of.
+func getOAuthGroupsForUser(ctx go_context.Context, user *user_model.User, onlyPublicGroups bool) ([]string, error) {
+ orgs, err := org_model.GetUserOrgsList(ctx, user)
+ if err != nil {
+ return nil, fmt.Errorf("GetUserOrgsList: %w", err)
+ }
+
+ var groups []string
+ for _, org := range orgs {
+ if setting.OAuth2.EnableAdditionalGrantScopes {
+ if onlyPublicGroups {
+ public, err := org_model.IsPublicMembership(ctx, org.ID, user.ID)
+ if !public && err == nil {
+ continue
+ }
+ }
+ }
+
+ groups = append(groups, org.Name)
+ teams, err := org.LoadTeams(ctx)
+ if err != nil {
+ return nil, fmt.Errorf("LoadTeams: %w", err)
+ }
+ for _, team := range teams {
+ if team.IsMember(ctx, user.ID) {
+ groups = append(groups, org.Name+":"+team.LowerName)
+ }
+ }
+ }
+ return groups, nil
+}
+
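+// parseBasicAuth extracts the username and password from the request's HTTP
+// Basic Authorization header.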
+func parseBasicAuth(ctx *context.Context) (username, password string, err error) {
+ authHeader := ctx.Req.Header.Get("Authorization")
+ if authType, authData, ok := strings.Cut(authHeader, " "); ok && strings.EqualFold(authType, "Basic") {
+ return base.BasicAuthDecode(authData)
+ }
+ return "", "", errors.New("invalid basic authentication")
+}
+
+// IntrospectOAuth introspects an oauth token
+func IntrospectOAuth(ctx *context.Context) {
+ clientIDValid := false
+ if clientID, clientSecret, err := parseBasicAuth(ctx); err == nil {
+ app, err := auth.GetOAuth2ApplicationByClientID(ctx, clientID)
+ if err != nil && !auth.IsErrOauthClientIDInvalid(err) {
+ // this is likely a database error; log it and respond without details
+ log.Error("Error retrieving client_id: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ clientIDValid = err == nil && app.ValidateClientSecret([]byte(clientSecret))
+ }
+ if !clientIDValid {
+ ctx.Resp.Header().Set("WWW-Authenticate", `Basic realm=""`)
+ ctx.PlainText(http.StatusUnauthorized, "no valid authorization")
+ return
+ }
+
+ var response struct {
+ Active bool `json:"active"`
+ Scope string `json:"scope,omitempty"`
+ Username string `json:"username,omitempty"`
+ jwt.RegisteredClaims
+ }
+
+ form := web.GetForm(ctx).(*forms.IntrospectTokenForm)
+ token, err := oauth2.ParseToken(form.Token, oauth2.DefaultSigningKey)
+ if err == nil {
+ grant, err := auth.GetOAuth2GrantByID(ctx, token.GrantID)
+ if err == nil && grant != nil {
+ app, err := auth.GetOAuth2ApplicationByID(ctx, grant.ApplicationID)
+ if err == nil && app != nil {
+ response.Active = true
+ response.Scope = grant.Scope
+ response.Issuer = setting.AppURL
+ response.Audience = []string{app.ClientID}
+ response.Subject = fmt.Sprint(grant.UserID)
+ }
+ if user, err := user_model.GetUserByID(ctx, grant.UserID); err == nil {
+ response.Username = user.Name
+ }
+ }
+ }
+
+ ctx.JSON(http.StatusOK, response)
+}
+
+// AuthorizeOAuth manages authorize requests
+func AuthorizeOAuth(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AuthorizationForm)
+ errs := binding.Errors{}
+ errs = form.Validate(ctx.Req, errs)
+ if len(errs) > 0 {
+ errstring := ""
+ for _, e := range errs {
+ errstring += e.Error() + "\n"
+ }
+ ctx.ServerError("AuthorizeOAuth: Validate: ", fmt.Errorf("errors occurred during validation: %s", errstring))
+ return
+ }
+
+ app, err := auth.GetOAuth2ApplicationByClientID(ctx, form.ClientID)
+ if err != nil {
+ if auth.IsErrOauthClientIDInvalid(err) {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeUnauthorizedClient,
+ ErrorDescription: "Client ID not registered",
+ State: form.State,
+ }, "")
+ return
+ }
+ ctx.ServerError("GetOAuth2ApplicationByClientID", err)
+ return
+ }
+
+ var user *user_model.User
+ if app.UID != 0 {
+ user, err = user_model.GetUserByID(ctx, app.UID)
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+ }
+
+ if !app.ContainsRedirectURI(form.RedirectURI) {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeInvalidRequest,
+ ErrorDescription: "Unregistered Redirect URI",
+ State: form.State,
+ }, "")
+ return
+ }
+
+ if form.ResponseType != "code" {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeUnsupportedResponseType,
+ ErrorDescription: "Only code response type is supported.",
+ State: form.State,
+ }, form.RedirectURI)
+ return
+ }
+
+ // pkce support
+ switch form.CodeChallengeMethod {
+ case "S256":
+ fallthrough
+ case "plain":
+ if err := ctx.Session.Set("CodeChallengeMethod", form.CodeChallengeMethod); err != nil {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeServerError,
+ ErrorDescription: "cannot set code challenge method",
+ State: form.State,
+ }, form.RedirectURI)
+ return
+ }
+ if err := ctx.Session.Set("CodeChallenge", form.CodeChallenge); err != nil {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeServerError,
+ ErrorDescription: "cannot set code challenge",
+ State: form.State,
+ }, form.RedirectURI)
+ return
+ }
+ // Here we're just going to try to release the session early
+ if err := ctx.Session.Release(); err != nil {
+ // we'll tolerate errors here as they *should* get saved elsewhere
+ log.Error("Unable to save changes to the session: %v", err)
+ }
+ case "":
+ // "Authorization servers SHOULD reject authorization requests from native apps that don't use PKCE by returning an error message"
+ // https://datatracker.ietf.org/doc/html/rfc8252#section-8.1
+ if !app.ConfidentialClient {
+ // "the authorization endpoint MUST return the authorization error response with the "error" value set to "invalid_request""
+ // https://datatracker.ietf.org/doc/html/rfc7636#section-4.4.1
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeInvalidRequest,
+ ErrorDescription: "PKCE is required for public clients",
+ State: form.State,
+ }, form.RedirectURI)
+ return
+ }
+ default:
+ // "If the server supporting PKCE does not support the requested transformation, the authorization endpoint MUST return the authorization error response with "error" value set to "invalid_request"."
+ // https://www.rfc-editor.org/rfc/rfc7636#section-4.4.1
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeInvalidRequest,
+ ErrorDescription: "unsupported code challenge method",
+ State: form.State,
+ }, form.RedirectURI)
+ return
+ }
+
+ grant, err := app.GetGrantByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+
+ // Redirect if user already granted access and the application is confidential.
+ // I.e. always require authorization for public clients as recommended by RFC 6749 Section 10.2
+ if app.ConfidentialClient && grant != nil {
+ code, err := grant.GenerateNewAuthorizationCode(ctx, form.RedirectURI, form.CodeChallenge, form.CodeChallengeMethod)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+ redirect, err := code.GenerateRedirectURI(form.State)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+ // Update nonce to reflect the new session
+ if len(form.Nonce) > 0 {
+ err := grant.SetNonce(ctx, form.Nonce)
+ if err != nil {
+ log.Error("Unable to update nonce: %v", err)
+ }
+ }
+ ctx.Redirect(redirect.String())
+ return
+ }
+
+ // show authorize page to grant access
+ ctx.Data["Application"] = app
+ ctx.Data["RedirectURI"] = form.RedirectURI
+ ctx.Data["State"] = form.State
+ ctx.Data["Scope"] = form.Scope
+ ctx.Data["Nonce"] = form.Nonce
+ if user != nil {
+ ctx.Data["ApplicationCreatorLinkHTML"] = template.HTML(fmt.Sprintf(`<a href="%s">@%s</a>`, html.EscapeString(user.HomeLink()), html.EscapeString(user.Name)))
+ } else {
+ ctx.Data["ApplicationCreatorLinkHTML"] = template.HTML(fmt.Sprintf(`<a href="%s">%s</a>`, html.EscapeString(setting.AppSubURL+"/"), html.EscapeString(setting.AppName)))
+ }
+ ctx.Data["ApplicationRedirectDomainHTML"] = template.HTML("<strong>" + html.EscapeString(form.RedirectURI) + "</strong>")
+ // TODO document SESSION <=> FORM
+ err = ctx.Session.Set("client_id", app.ClientID)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ log.Error(err.Error())
+ return
+ }
+ err = ctx.Session.Set("redirect_uri", form.RedirectURI)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ log.Error(err.Error())
+ return
+ }
+ err = ctx.Session.Set("state", form.State)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ log.Error(err.Error())
+ return
+ }
+ // Here we're just going to try to release the session early
+ if err := ctx.Session.Release(); err != nil {
+ // we'll tolerate errors here as they *should* get saved elsewhere
+ log.Error("Unable to save changes to the session: %v", err)
+ }
+ ctx.HTML(http.StatusOK, tplGrantAccess)
+}
+
+// GrantApplicationOAuth manages the post request submitted when a user grants access to an application
+func GrantApplicationOAuth(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.GrantApplicationForm)
+ if ctx.Session.Get("client_id") != form.ClientID || ctx.Session.Get("state") != form.State ||
+ ctx.Session.Get("redirect_uri") != form.RedirectURI {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ if !form.Granted {
+ handleAuthorizeError(ctx, AuthorizeError{
+ State: form.State,
+ ErrorDescription: "the request is denied",
+ ErrorCode: ErrorCodeAccessDenied,
+ }, form.RedirectURI)
+ return
+ }
+
+ app, err := auth.GetOAuth2ApplicationByClientID(ctx, form.ClientID)
+ if err != nil {
+ ctx.ServerError("GetOAuth2ApplicationByClientID", err)
+ return
+ }
+ grant, err := app.GetGrantByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+ if grant == nil {
+ grant, err = app.CreateGrant(ctx, ctx.Doer.ID, form.Scope)
+ if err != nil {
+ handleAuthorizeError(ctx, AuthorizeError{
+ State: form.State,
+ ErrorDescription: "cannot create grant for user",
+ ErrorCode: ErrorCodeServerError,
+ }, form.RedirectURI)
+ return
+ }
+ } else if grant.Scope != form.Scope {
+ handleAuthorizeError(ctx, AuthorizeError{
+ State: form.State,
+ ErrorDescription: "a grant exists with different scope",
+ ErrorCode: ErrorCodeServerError,
+ }, form.RedirectURI)
+ return
+ }
+
+ if len(form.Nonce) > 0 {
+ err := grant.SetNonce(ctx, form.Nonce)
+ if err != nil {
+ log.Error("Unable to update nonce: %v", err)
+ }
+ }
+
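+ // Retrieve the PKCE challenge that AuthorizeOAuth stashed in the session so it
+ // can be bound to the newly generated authorization code.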
+ var codeChallenge, codeChallengeMethod string
+ codeChallenge, _ = ctx.Session.Get("CodeChallenge").(string)
+ codeChallengeMethod, _ = ctx.Session.Get("CodeChallengeMethod").(string)
+
+ code, err := grant.GenerateNewAuthorizationCode(ctx, form.RedirectURI, codeChallenge, codeChallengeMethod)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+ redirect, err := code.GenerateRedirectURI(form.State)
+ if err != nil {
+ handleServerError(ctx, form.State, form.RedirectURI)
+ return
+ }
+ ctx.Redirect(redirect.String(), http.StatusSeeOther)
+}
+
+// OIDCWellKnown generates JSON so OIDC clients know Gitea's capabilities
+func OIDCWellKnown(ctx *context.Context) {
+ ctx.Data["SigningKey"] = oauth2.DefaultSigningKey
+ ctx.JSONTemplate("user/auth/oidc_wellknown")
+}
+
+// OIDCKeys generates the JSON Web Key Set
+func OIDCKeys(ctx *context.Context) {
+ jwk, err := oauth2.DefaultSigningKey.ToJWK()
+ if err != nil {
+ log.Error("Error converting signing key to JWK: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ jwk["use"] = "sig"
+
+ jwks := map[string][]map[string]string{
+ "keys": {
+ jwk,
+ },
+ }
+
+ ctx.Resp.Header().Set("Content-Type", "application/json")
+ enc := json.NewEncoder(ctx.Resp)
+ if err := enc.Encode(jwks); err != nil {
+ log.Error("Failed to encode representation as json. Error: %v", err)
+ }
+}
+
+// AccessTokenOAuth manages all access token requests by the client
+func AccessTokenOAuth(ctx *context.Context) {
+ form := *web.GetForm(ctx).(*forms.AccessTokenForm)
+ // if there is no ClientID or ClientSecret in the request body, fill these fields by the Authorization header and ensure the provided field matches the Authorization header
+ if form.ClientID == "" || form.ClientSecret == "" {
+ authHeader := ctx.Req.Header.Get("Authorization")
+ if authType, authData, ok := strings.Cut(authHeader, " "); ok && strings.EqualFold(authType, "Basic") {
+ clientID, clientSecret, err := base.BasicAuthDecode(authData)
+ if err != nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "cannot parse basic auth header",
+ })
+ return
+ }
+ // validate that any fields present in the form match the Basic auth header
+ if form.ClientID != "" && form.ClientID != clientID {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "client_id in request body inconsistent with Authorization header",
+ })
+ return
+ }
+ form.ClientID = clientID
+ if form.ClientSecret != "" && form.ClientSecret != clientSecret {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "client_secret in request body inconsistent with Authorization header",
+ })
+ return
+ }
+ form.ClientSecret = clientSecret
+ }
+ }
+
+ serverKey := oauth2.DefaultSigningKey
+ clientKey := serverKey
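+ // If the server key is symmetric, derive the client key from the client secret
+ // so the id_token is signed per client instead of with the shared server key.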
+ if serverKey.IsSymmetric() {
+ var err error
+ clientKey, err = oauth2.CreateJWTSigningKey(serverKey.SigningMethod().Alg(), []byte(form.ClientSecret))
+ if err != nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidRequest,
+ ErrorDescription: "Error creating signing key",
+ })
+ return
+ }
+ }
+
+ switch form.GrantType {
+ case "refresh_token":
+ handleRefreshToken(ctx, form, serverKey, clientKey)
+ case "authorization_code":
+ handleAuthorizationCode(ctx, form, serverKey, clientKey)
+ default:
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnsupportedGrantType,
+ ErrorDescription: "Only refresh_token or authorization_code grant type is supported",
+ })
+ }
+}
+
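+// handleRefreshToken implements the "refresh_token" grant: it authenticates the
+// client, parses the refresh token, detects token reuse and issues a fresh
+// access/refresh token pair.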
+func handleRefreshToken(ctx *context.Context, form forms.AccessTokenForm, serverKey, clientKey oauth2.JWTSigningKey) {
+ app, err := auth.GetOAuth2ApplicationByClientID(ctx, form.ClientID)
+ if err != nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidClient,
+ ErrorDescription: fmt.Sprintf("cannot load client with client id: %q", form.ClientID),
+ })
+ return
+ }
+ // "The authorization server MUST ... require client authentication for confidential clients"
+ // https://datatracker.ietf.org/doc/html/rfc6749#section-6
+ if app.ConfidentialClient && !app.ValidateClientSecret([]byte(form.ClientSecret)) {
+ errorDescription := "invalid client secret"
+ if form.ClientSecret == "" {
+ errorDescription = "invalid empty client secret"
+ }
+ // "invalid_client ... Client authentication failed"
+ // https://datatracker.ietf.org/doc/html/rfc6749#section-5.2
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidClient,
+ ErrorDescription: errorDescription,
+ })
+ return
+ }
+
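+	// The refresh token is itself a JWT signed with the server key; verify and
+	// parse it before resolving the grant it refers to.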
+ token, err := oauth2.ParseToken(form.RefreshToken, serverKey)
+ if err != nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: "unable to parse refresh token",
+ })
+ return
+ }
+ // get grant before increasing counter
+ grant, err := auth.GetOAuth2GrantByID(ctx, token.GrantID)
+ if err != nil || grant == nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidGrant,
+ ErrorDescription: "grant does not exist",
+ })
+ return
+ }
+
+	// check whether this refresh token was already used (its counter must match the grant's)
+ if setting.OAuth2.InvalidateRefreshTokens && (grant.Counter != token.Counter || token.Counter == 0) {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: "token was already used",
+ })
+		log.Warn("A client tried to reuse a refresh token for grant_id = %d; the token was already used", grant.ID)
+ return
+ }
+ accessToken, tokenErr := newAccessTokenResponse(ctx, grant, serverKey, clientKey)
+ if tokenErr != nil {
+ handleAccessTokenError(ctx, *tokenErr)
+ return
+ }
+ ctx.JSON(http.StatusOK, accessToken)
+}
+
+func handleAuthorizationCode(ctx *context.Context, form forms.AccessTokenForm, serverKey, clientKey oauth2.JWTSigningKey) {
+ app, err := auth.GetOAuth2ApplicationByClientID(ctx, form.ClientID)
+ if err != nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidClient,
+ ErrorDescription: fmt.Sprintf("cannot load client with client id: '%s'", form.ClientID),
+ })
+ return
+ }
+ if app.ConfidentialClient && !app.ValidateClientSecret([]byte(form.ClientSecret)) {
+ errorDescription := "invalid client secret"
+ if form.ClientSecret == "" {
+ errorDescription = "invalid empty client secret"
+ }
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: errorDescription,
+ })
+ return
+ }
+ if form.RedirectURI != "" && !app.ContainsRedirectURI(form.RedirectURI) {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: "unexpected redirect URI",
+ })
+ return
+ }
+ authorizationCode, err := auth.GetOAuth2AuthorizationByCode(ctx, form.Code)
+ if err != nil || authorizationCode == nil {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: "client is not authorized",
+ })
+ return
+ }
+	// PKCE: check that the code verifier matches the stored code challenge
+ if !authorizationCode.ValidateCodeChallenge(form.CodeVerifier) {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeUnauthorizedClient,
+ ErrorDescription: "failed PKCE code challenge",
+ })
+ return
+ }
+	// check that the authorization code was granted to this application
+ if authorizationCode.Grant.ApplicationID != app.ID {
+ handleAccessTokenError(ctx, AccessTokenError{
+ ErrorCode: AccessTokenErrorCodeInvalidGrant,
+ ErrorDescription: "invalid grant",
+ })
+ return
+ }
+	// remove the authorization code from the database to prevent duplicate usage
+	if err := authorizationCode.Invalidate(ctx); err != nil {
+		handleAccessTokenError(ctx, AccessTokenError{
+			ErrorCode:        AccessTokenErrorCodeInvalidRequest,
+			ErrorDescription: "cannot process your request",
+		})
+		return
+	}
+ resp, tokenErr := newAccessTokenResponse(ctx, authorizationCode.Grant, serverKey, clientKey)
+ if tokenErr != nil {
+ handleAccessTokenError(ctx, *tokenErr)
+ return
+ }
+ // send successful response
+ ctx.JSON(http.StatusOK, resp)
+}
+
+func handleAccessTokenError(ctx *context.Context, acErr AccessTokenError) {
+ ctx.JSON(http.StatusBadRequest, acErr)
+}
+
+func handleServerError(ctx *context.Context, state, redirectURI string) {
+ handleAuthorizeError(ctx, AuthorizeError{
+ ErrorCode: ErrorCodeServerError,
+ ErrorDescription: "A server error occurred",
+ State: state,
+ }, redirectURI)
+}
+
+func handleAuthorizeError(ctx *context.Context, authErr AuthorizeError, redirectURI string) {
+ if redirectURI == "" {
+ log.Warn("Authorization failed: %v", authErr.ErrorDescription)
+ ctx.Data["Error"] = authErr
+ ctx.HTML(http.StatusBadRequest, tplGrantError)
+ return
+ }
+ redirect, err := url.Parse(redirectURI)
+ if err != nil {
+ ctx.ServerError("url.Parse", err)
+ return
+ }
+ q := redirect.Query()
+ q.Set("error", string(authErr.ErrorCode))
+ q.Set("error_description", authErr.ErrorDescription)
+ q.Set("state", authErr.State)
+ redirect.RawQuery = q.Encode()
+ ctx.Redirect(redirect.String(), http.StatusSeeOther)
+}
+
+// SignInOAuth handles the OAuth2 login buttons
+func SignInOAuth(ctx *context.Context) {
+ provider := ctx.Params(":provider")
+
+ authSource, err := auth.GetActiveOAuth2SourceByName(ctx, provider)
+ if err != nil {
+ ctx.ServerError("SignIn", err)
+ return
+ }
+
+ redirectTo := ctx.FormString("redirect_to")
+ if len(redirectTo) > 0 {
+ middleware.SetRedirectToCookie(ctx.Resp, redirectTo)
+ }
+
+	// try a direct callback flow first, so we don't authenticate the user again but use the valid access token to fetch the user
+ user, gothUser, err := oAuth2UserLoginCallback(ctx, authSource, ctx.Req, ctx.Resp)
+ if err == nil && user != nil {
+ // we got the user without going through the whole OAuth2 authentication flow again
+ handleOAuth2SignIn(ctx, authSource, user, gothUser)
+ return
+ }
+
+ codeChallenge, err := generateCodeChallenge(ctx, provider)
+ if err != nil {
+ ctx.ServerError("SignIn", fmt.Errorf("could not generate code_challenge: %w", err))
+ return
+ }
+
+ if err = authSource.Cfg.(*oauth2.Source).Callout(ctx.Req, ctx.Resp, codeChallenge); err != nil {
+ if strings.Contains(err.Error(), "no provider for ") {
+ if err = oauth2.ResetOAuth2(ctx); err != nil {
+ ctx.ServerError("SignIn", err)
+ return
+ }
+ if err = authSource.Cfg.(*oauth2.Source).Callout(ctx.Req, ctx.Resp, codeChallenge); err != nil {
+ ctx.ServerError("SignIn", err)
+ }
+ return
+ }
+ ctx.ServerError("SignIn", err)
+ }
+	// the redirect to the provider is performed by the Callout call above
+}
+
+// SignInOAuthCallback handles the callback from the given provider
+func SignInOAuthCallback(ctx *context.Context) {
+ provider := ctx.Params(":provider")
+
+ if ctx.Req.FormValue("error") != "" {
+ var errorKeyValues []string
+ for k, vv := range ctx.Req.Form {
+ for _, v := range vv {
+ errorKeyValues = append(errorKeyValues, fmt.Sprintf("%s = %s", html.EscapeString(k), html.EscapeString(v)))
+ }
+ }
+ sort.Strings(errorKeyValues)
+ ctx.Flash.Error(strings.Join(errorKeyValues, "<br>"), true)
+ }
+
+ // first look if the provider is still active
+ authSource, err := auth.GetActiveOAuth2SourceByName(ctx, provider)
+ if err != nil {
+ ctx.ServerError("SignIn", err)
+ return
+ }
+
+ if authSource == nil {
+ ctx.ServerError("SignIn", errors.New("no valid provider found, check configured callback url in provider"))
+ return
+ }
+
+ u, gothUser, err := oAuth2UserLoginCallback(ctx, authSource, ctx.Req, ctx.Resp)
+ if err != nil {
+ if user_model.IsErrUserProhibitLogin(err) {
+ uplerr := err.(user_model.ErrUserProhibitLogin)
+ log.Info("Failed authentication attempt for %s from %s: %v", uplerr.Name, ctx.RemoteAddr(), err)
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ return
+ }
+ if callbackErr, ok := err.(errCallback); ok {
+ log.Info("Failed OAuth callback: (%v) %v", callbackErr.Code, callbackErr.Description)
+ switch callbackErr.Code {
+ case "access_denied":
+ ctx.Flash.Error(ctx.Tr("auth.oauth.signin.error.access_denied"))
+ case "temporarily_unavailable":
+ ctx.Flash.Error(ctx.Tr("auth.oauth.signin.error.temporarily_unavailable"))
+ default:
+ ctx.Flash.Error(ctx.Tr("auth.oauth.signin.error"))
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/login")
+ return
+ }
+ if err, ok := err.(*go_oauth2.RetrieveError); ok {
+ ctx.Flash.Error("OAuth2 RetrieveError: "+err.Error(), true)
+ }
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ if u == nil {
+ if ctx.Doer != nil {
+ // attach user to already logged in user
+ err = externalaccount.LinkAccountToUser(ctx, ctx.Doer, gothUser)
+ if err != nil {
+ ctx.ServerError("UserLinkAccount", err)
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+ return
+ } else if !setting.Service.AllowOnlyInternalRegistration && setting.OAuth2Client.EnableAutoRegistration {
+ // create new user with details from oauth2 provider
+ if gothUser.UserID == "" {
+ log.Error("OAuth2 Provider %s returned empty or missing field: UserID", authSource.Name)
+ if authSource.IsOAuth2() && authSource.Cfg.(*oauth2.Source).Provider == "openidConnect" {
+ log.Error("You may need to change the 'OPENID_CONNECT_SCOPES' setting to request all required fields")
+ }
+ err = fmt.Errorf("OAuth2 Provider %s returned empty or missing field: UserID", authSource.Name)
+ ctx.ServerError("CreateUser", err)
+ return
+ }
+ var missingFields []string
+ if gothUser.Email == "" {
+ missingFields = append(missingFields, "email")
+ }
+ if setting.OAuth2Client.Username == setting.OAuth2UsernameNickname && gothUser.NickName == "" {
+ missingFields = append(missingFields, "nickname")
+ }
+ if len(missingFields) > 0 {
+ // we don't have enough information to create an account automatically,
+ // so we prompt the user for the remaining bits
+ log.Trace("OAuth2 Provider %s returned empty or missing fields: %s, prompting the user for them", authSource.Name, missingFields)
+ showLinkingLogin(ctx, gothUser)
+ return
+ }
+ uname, err := getUserName(&gothUser)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ u = &user_model.User{
+ Name: uname,
+ FullName: gothUser.Name,
+ Email: gothUser.Email,
+ LoginType: auth.OAuth2,
+ LoginSource: authSource.ID,
+ LoginName: gothUser.UserID,
+ }
+
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(!setting.OAuth2Client.RegisterEmailConfirm && !setting.Service.RegisterManualConfirm),
+ }
+
+ source := authSource.Cfg.(*oauth2.Source)
+
+ isAdmin, isRestricted := getUserAdminAndRestrictedFromGroupClaims(source, &gothUser)
+ u.IsAdmin = isAdmin.ValueOrDefault(false)
+ u.IsRestricted = isRestricted.ValueOrDefault(false)
+
+ if !createAndHandleCreatedUser(ctx, base.TplName(""), nil, u, overwriteDefault, &gothUser, setting.OAuth2Client.AccountLinking != setting.OAuth2AccountLinkingDisabled) {
+ // error already handled
+ return
+ }
+
+ if err := syncGroupsToTeams(ctx, source, &gothUser, u); err != nil {
+ ctx.ServerError("SyncGroupsToTeams", err)
+ return
+ }
+ } else {
+ // no existing user is found, request attach or new account
+ showLinkingLogin(ctx, gothUser)
+ return
+ }
+ }
+
+ handleOAuth2SignIn(ctx, authSource, u, gothUser)
+}
+
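+// claimValueToStringSet normalizes a group claim into a set of group names. Depending
+// on the provider, the claim may arrive as a string slice, a slice of arbitrary
+// values, or a single comma-separated string.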
+func claimValueToStringSet(claimValue any) container.Set[string] {
+ var groups []string
+
+ switch rawGroup := claimValue.(type) {
+ case []string:
+ groups = rawGroup
+ case []any:
+ for _, group := range rawGroup {
+ groups = append(groups, fmt.Sprintf("%s", group))
+ }
+ default:
+ str := fmt.Sprintf("%s", rawGroup)
+ groups = strings.Split(str, ",")
+ }
+ return container.SetOf(groups...)
+}
+
+func syncGroupsToTeams(ctx *context.Context, source *oauth2.Source, gothUser *goth.User, u *user_model.User) error {
+ if source.GroupTeamMap != "" || source.GroupTeamMapRemoval {
+ groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(source.GroupTeamMap)
+ if err != nil {
+ return err
+ }
+
+ groups := getClaimedGroups(source, gothUser)
+
+ if err := source_service.SyncGroupsToTeams(ctx, u, groups, groupTeamMapping, source.GroupTeamMapRemoval); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func getClaimedGroups(source *oauth2.Source, gothUser *goth.User) container.Set[string] {
+ groupClaims, has := gothUser.RawData[source.GroupClaimName]
+ if !has {
+ return nil
+ }
+
+ return claimValueToStringSet(groupClaims)
+}
+
+func getUserAdminAndRestrictedFromGroupClaims(source *oauth2.Source, gothUser *goth.User) (isAdmin, isRestricted optional.Option[bool]) {
+ groups := getClaimedGroups(source, gothUser)
+
+ if source.AdminGroup != "" {
+ isAdmin = optional.Some(groups.Contains(source.AdminGroup))
+ }
+ if source.RestrictedGroup != "" {
+ isRestricted = optional.Some(groups.Contains(source.RestrictedGroup))
+ }
+
+ return isAdmin, isRestricted
+}
+
+func showLinkingLogin(ctx *context.Context, gothUser goth.User) {
+ if err := updateSession(ctx, nil, map[string]any{
+ "linkAccountGothUser": gothUser,
+ }); err != nil {
+ ctx.ServerError("updateSession", err)
+ return
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/link_account")
+}
+
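+// updateAvatarIfNeed downloads the avatar from the OAuth2 provider and stores it for
+// the user when avatar updates are enabled; oversized responses and any download or
+// upload errors are deliberately ignored.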
+func updateAvatarIfNeed(ctx *context.Context, url string, u *user_model.User) {
+ if setting.OAuth2Client.UpdateAvatar && len(url) > 0 {
+ resp, err := http.Get(url)
+ if err == nil {
+ defer func() {
+ _ = resp.Body.Close()
+ }()
+ }
+ // ignore any error
+ if err == nil && resp.StatusCode == http.StatusOK {
+ data, err := io.ReadAll(io.LimitReader(resp.Body, setting.Avatar.MaxFileSize+1))
+ if err == nil && int64(len(data)) <= setting.Avatar.MaxFileSize {
+ _ = user_service.UploadAvatar(ctx, u, data)
+ }
+ }
+ }
+}
+
+func handleOAuth2SignIn(ctx *context.Context, source *auth.Source, u *user_model.User, gothUser goth.User) {
+ updateAvatarIfNeed(ctx, gothUser.AvatarURL, u)
+
+ needs2FA := false
+ if !source.Cfg.(*oauth2.Source).SkipLocalTwoFA {
+ _, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ needs2FA = err == nil
+ }
+
+ oauth2Source := source.Cfg.(*oauth2.Source)
+ groupTeamMapping, err := auth_module.UnmarshalGroupTeamMapping(oauth2Source.GroupTeamMap)
+ if err != nil {
+ ctx.ServerError("UnmarshalGroupTeamMapping", err)
+ return
+ }
+
+ groups := getClaimedGroups(oauth2Source, &gothUser)
+
+ opts := &user_service.UpdateOptions{}
+
+ // Reactivate user if they are deactivated
+ if !u.IsActive {
+ opts.IsActive = optional.Some(true)
+ }
+
+ // Update GroupClaims
+ opts.IsAdmin, opts.IsRestricted = getUserAdminAndRestrictedFromGroupClaims(oauth2Source, &gothUser)
+
+ if oauth2Source.GroupTeamMap != "" || oauth2Source.GroupTeamMapRemoval {
+ if err := source_service.SyncGroupsToTeams(ctx, u, groups, groupTeamMapping, oauth2Source.GroupTeamMapRemoval); err != nil {
+ ctx.ServerError("SyncGroupsToTeams", err)
+ return
+ }
+ }
+
+ if err := externalaccount.EnsureLinkExternalToUser(ctx, u, gothUser); err != nil {
+ ctx.ServerError("EnsureLinkExternalToUser", err)
+ return
+ }
+
+ // If this user is enrolled in 2FA and this source doesn't override it,
+ // we can't sign the user in just yet. Instead, redirect them to the 2FA authentication page.
+ if !needs2FA {
+ // Register last login
+ opts.SetLastLogin = true
+
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ if err := updateSession(ctx, nil, map[string]any{
+ "uid": u.ID,
+ }); err != nil {
+ ctx.ServerError("updateSession", err)
+ return
+ }
+
+ // Clear whatever CSRF cookie has right now, force to generate a new one
+ ctx.Csrf.DeleteCookie(ctx)
+
+ if err := resetLocale(ctx, u); err != nil {
+ ctx.ServerError("resetLocale", err)
+ return
+ }
+
+ if redirectTo := ctx.GetSiteCookie("redirect_to"); len(redirectTo) > 0 {
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ ctx.RedirectToFirst(redirectTo)
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/")
+ return
+ }
+
+ if opts.IsActive.Has() || opts.IsAdmin.Has() || opts.IsRestricted.Has() {
+ if err := user_service.UpdateUser(ctx, u, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+ }
+
+ if err := updateSession(ctx, nil, map[string]any{
+ // User needs to use 2FA, save data and redirect to 2FA page.
+ "twofaUid": u.ID,
+ "twofaRemember": false,
+ }); err != nil {
+ ctx.ServerError("updateSession", err)
+ return
+ }
+
+ // If WebAuthn is enrolled -> Redirect to WebAuthn instead
+ regs, err := auth.GetWebAuthnCredentialsByUID(ctx, u.ID)
+ if err == nil && len(regs) > 0 {
+ ctx.Redirect(setting.AppSubURL + "/user/webauthn")
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/two_factor")
+}
+
+// generateCodeChallenge stores a code verifier in the session and returns a S256 code challenge for PKCE
+func generateCodeChallenge(ctx *context.Context, provider string) (codeChallenge string, err error) {
+ // the `code_verifier` is only forwarded by specific providers
+ // https://codeberg.org/forgejo/forgejo/issues/4033
+ p, ok := goth.GetProviders()[provider]
+ if !ok {
+ return "", nil
+ }
+ switch p.(type) {
+ default:
+ return "", nil
+ case *openidConnect.Provider, *fitbit.Provider, *zoom.Provider:
+ // those providers forward the `code_verifier`
+ // a code_challenge can be generated
+ }
+
+	codeVerifier, err := util.CryptoRandomString(43) // 43 chars ≈ 256/log2(62), i.e. roughly 256 bits of entropy (each char carries log2(62) bits)
+ if err != nil {
+ return "", err
+ }
+ if err = ctx.Session.Set("CodeVerifier", codeVerifier); err != nil {
+ return "", err
+ }
+ return encodeCodeChallenge(codeVerifier)
+}
+
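+// encodeCodeChallenge derives the S256 code challenge defined in RFC 7636:
+// BASE64URL(SHA256(code_verifier)), without padding.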
+func encodeCodeChallenge(codeVerifier string) (string, error) {
+ hasher := sha256.New()
+ _, err := io.WriteString(hasher, codeVerifier)
+ codeChallenge := base64.RawURLEncoding.EncodeToString(hasher.Sum(nil))
+ return codeChallenge, err
+}
+
+// oAuth2UserLoginCallback attempts to handle the callback from the OAuth2 provider
+// and, if successful, logs the user in
+func oAuth2UserLoginCallback(ctx *context.Context, authSource *auth.Source, request *http.Request, response http.ResponseWriter) (*user_model.User, goth.User, error) {
+ gothUser, err := oAuth2FetchUser(ctx, authSource, request, response)
+ if err != nil {
+ return nil, goth.User{}, err
+ }
+
+ if _, _, err := remote_service.MaybePromoteRemoteUser(ctx, authSource, gothUser.UserID, gothUser.Email); err != nil {
+ return nil, goth.User{}, err
+ }
+
+ u, err := oAuth2GothUserToUser(request.Context(), authSource, gothUser)
+ return u, gothUser, err
+}
+
+func oAuth2FetchUser(ctx *context.Context, authSource *auth.Source, request *http.Request, response http.ResponseWriter) (goth.User, error) {
+ oauth2Source := authSource.Cfg.(*oauth2.Source)
+
+ // Make sure that the response is not an error response.
+ errorName := request.FormValue("error")
+
+ if len(errorName) > 0 {
+ errorDescription := request.FormValue("error_description")
+
+ // Delete the goth session
+ err := gothic.Logout(response, request)
+ if err != nil {
+ return goth.User{}, err
+ }
+
+ return goth.User{}, errCallback{
+ Code: errorName,
+ Description: errorDescription,
+ }
+ }
+
+ // Proceed to authenticate through goth.
+ codeVerifier, _ := ctx.Session.Get("CodeVerifier").(string)
+ _ = ctx.Session.Delete("CodeVerifier")
+ gothUser, err := oauth2Source.Callback(request, response, codeVerifier)
+ if err != nil {
+ if err.Error() == "securecookie: the value is too long" || strings.Contains(err.Error(), "Data too long") {
+ log.Error("OAuth2 Provider %s returned too long a token. Current max: %d. Either increase the [OAuth2] MAX_TOKEN_LENGTH or reduce the information returned from the OAuth2 provider", authSource.Name, setting.OAuth2.MaxTokenLength)
+ err = fmt.Errorf("OAuth2 Provider %s returned too long a token. Current max: %d. Either increase the [OAuth2] MAX_TOKEN_LENGTH or reduce the information returned from the OAuth2 provider", authSource.Name, setting.OAuth2.MaxTokenLength)
+ }
+ return goth.User{}, err
+ }
+
+ if oauth2Source.RequiredClaimName != "" {
+ claimInterface, has := gothUser.RawData[oauth2Source.RequiredClaimName]
+ if !has {
+ return goth.User{}, user_model.ErrUserProhibitLogin{Name: gothUser.UserID}
+ }
+
+ if oauth2Source.RequiredClaimValue != "" {
+ groups := claimValueToStringSet(claimInterface)
+
+ if !groups.Contains(oauth2Source.RequiredClaimValue) {
+ return goth.User{}, user_model.ErrUserProhibitLogin{Name: gothUser.UserID}
+ }
+ }
+ }
+
+ return gothUser, nil
+}
+
+func oAuth2GothUserToUser(ctx go_context.Context, authSource *auth.Source, gothUser goth.User) (*user_model.User, error) {
+ user := &user_model.User{
+ LoginName: gothUser.UserID,
+ LoginType: auth.OAuth2,
+ LoginSource: authSource.ID,
+ }
+
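+	// first look for a user that is bound directly to this authentication source
+	// via LoginName, LoginSource and LoginType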
+ hasUser, err := user_model.GetUser(ctx, user)
+ if err != nil {
+ return nil, err
+ }
+
+ if hasUser {
+ return user, nil
+ }
+ log.Debug("no user found for LoginName %v, LoginSource %v, LoginType %v", user.LoginName, user.LoginSource, user.LoginType)
+
+ // search in external linked users
+ externalLoginUser := &user_model.ExternalLoginUser{
+ ExternalID: gothUser.UserID,
+ LoginSourceID: authSource.ID,
+ }
+ hasUser, err = user_model.GetExternalLogin(ctx, externalLoginUser)
+ if err != nil {
+ return nil, err
+ }
+ if hasUser {
+ user, err = user_model.GetUserByID(ctx, externalLoginUser.UserID)
+ return user, err
+ }
+
+ // no user found to login
+ return nil, nil
+}
diff --git a/routers/web/auth/oauth_test.go b/routers/web/auth/oauth_test.go
new file mode 100644
index 0000000..5a4a646
--- /dev/null
+++ b/routers/web/auth/oauth_test.go
@@ -0,0 +1,103 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createAndParseToken(t *testing.T, grant *auth.OAuth2Grant) *oauth2.OIDCToken {
+ signingKey, err := oauth2.CreateJWTSigningKey("HS256", make([]byte, 32))
+ require.NoError(t, err)
+ assert.NotNil(t, signingKey)
+
+ response, terr := newAccessTokenResponse(db.DefaultContext, grant, signingKey, signingKey)
+ assert.Nil(t, terr)
+ assert.NotNil(t, response)
+
+ parsedToken, err := jwt.ParseWithClaims(response.IDToken, &oauth2.OIDCToken{}, func(token *jwt.Token) (any, error) {
+ assert.NotNil(t, token.Method)
+ assert.Equal(t, signingKey.SigningMethod().Alg(), token.Method.Alg())
+ return signingKey.VerifyKey(), nil
+ })
+ require.NoError(t, err)
+ assert.True(t, parsedToken.Valid)
+
+ oidcToken, ok := parsedToken.Claims.(*oauth2.OIDCToken)
+ assert.True(t, ok)
+ assert.NotNil(t, oidcToken)
+
+ return oidcToken
+}
+
+func TestNewAccessTokenResponse_OIDCToken(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ grants, err := auth.GetOAuth2GrantsByUserID(db.DefaultContext, 3)
+ require.NoError(t, err)
+ assert.Len(t, grants, 1)
+
+ // Scopes: openid
+ oidcToken := createAndParseToken(t, grants[0])
+ assert.Empty(t, oidcToken.Name)
+ assert.Empty(t, oidcToken.PreferredUsername)
+ assert.Empty(t, oidcToken.Profile)
+ assert.Empty(t, oidcToken.Picture)
+ assert.Empty(t, oidcToken.Website)
+ assert.Empty(t, oidcToken.UpdatedAt)
+ assert.Empty(t, oidcToken.Email)
+ assert.False(t, oidcToken.EmailVerified)
+
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 5})
+ grants, err = auth.GetOAuth2GrantsByUserID(db.DefaultContext, user.ID)
+ require.NoError(t, err)
+ assert.Len(t, grants, 1)
+
+ // Scopes: openid profile email
+ oidcToken = createAndParseToken(t, grants[0])
+ assert.Equal(t, user.Name, oidcToken.Name)
+ assert.Equal(t, user.Name, oidcToken.PreferredUsername)
+ assert.Equal(t, user.HTMLURL(), oidcToken.Profile)
+ assert.Equal(t, user.AvatarLink(db.DefaultContext), oidcToken.Picture)
+ assert.Equal(t, user.Website, oidcToken.Website)
+ assert.Equal(t, user.UpdatedUnix, oidcToken.UpdatedAt)
+ assert.Equal(t, user.Email, oidcToken.Email)
+ assert.Equal(t, user.IsActive, oidcToken.EmailVerified)
+
+ // set DefaultShowFullName to true
+ oldDefaultShowFullName := setting.UI.DefaultShowFullName
+ setting.UI.DefaultShowFullName = true
+ defer func() {
+ setting.UI.DefaultShowFullName = oldDefaultShowFullName
+ }()
+
+ // Scopes: openid profile email
+ oidcToken = createAndParseToken(t, grants[0])
+ assert.Equal(t, user.FullName, oidcToken.Name)
+ assert.Equal(t, user.Name, oidcToken.PreferredUsername)
+ assert.Equal(t, user.HTMLURL(), oidcToken.Profile)
+ assert.Equal(t, user.AvatarLink(db.DefaultContext), oidcToken.Picture)
+ assert.Equal(t, user.Website, oidcToken.Website)
+ assert.Equal(t, user.UpdatedUnix, oidcToken.UpdatedAt)
+ assert.Equal(t, user.Email, oidcToken.Email)
+ assert.Equal(t, user.IsActive, oidcToken.EmailVerified)
+}
+
+func TestEncodeCodeChallenge(t *testing.T) {
+ // test vector from https://datatracker.ietf.org/doc/html/rfc7636#page-18
+ codeChallenge, err := encodeCodeChallenge("dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk")
+ require.NoError(t, err)
+ assert.Equal(t, "E9Melhoa2OwvFrEMTJguCHaoeK1t8URWbuGJSstw-cM", codeChallenge)
+}
diff --git a/routers/web/auth/openid.go b/routers/web/auth/openid.go
new file mode 100644
index 0000000..83268fa
--- /dev/null
+++ b/routers/web/auth/openid.go
@@ -0,0 +1,391 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/openid"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplSignInOpenID base.TplName = "user/auth/signin_openid"
+ tplConnectOID base.TplName = "user/auth/signup_openid_connect"
+ tplSignUpOID base.TplName = "user/auth/signup_openid_register"
+)
+
+// SignInOpenID render sign in page
+func SignInOpenID(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+
+ if ctx.FormString("openid.return_to") != "" {
+ signInOpenIDVerify(ctx)
+ return
+ }
+
+ if CheckAutoLogin(ctx) {
+ return
+ }
+
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsLoginOpenID"] = true
+ ctx.HTML(http.StatusOK, tplSignInOpenID)
+}
+
+// Check if the given OpenID URI is allowed by blacklist/whitelist
+func allowedOpenIDURI(uri string) (err error) {
+ // In case a Whitelist is present, URI must be in it
+ // in order to be accepted
+ if len(setting.Service.OpenIDWhitelist) != 0 {
+ for _, pat := range setting.Service.OpenIDWhitelist {
+ if pat.MatchString(uri) {
+ return nil // pass
+ }
+ }
+		// must match one of these patterns or be refused
+ return fmt.Errorf("URI not allowed by whitelist")
+ }
+
+	// A blacklist match explicitly forbids the URI
+ for _, pat := range setting.Service.OpenIDBlacklist {
+ if pat.MatchString(uri) {
+ return fmt.Errorf("URI forbidden by blacklist")
+ }
+ }
+
+ return nil
+}
+
+// SignInOpenIDPost response for openid sign in request
+func SignInOpenIDPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.SignInOpenIDForm)
+ ctx.Data["Title"] = ctx.Tr("sign_in")
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsLoginOpenID"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSignInOpenID)
+ return
+ }
+
+ id, err := openid.Normalize(form.Openid)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)
+ return
+ }
+ form.Openid = id
+
+ log.Trace("OpenID uri: " + id)
+
+ err = allowedOpenIDURI(id)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &form)
+ return
+ }
+
+ redirectTo := setting.AppURL + "user/login/openid"
+ url, err := openid.RedirectURL(id, redirectTo, setting.AppURL)
+ if err != nil {
+ log.Error("Error in OpenID redirect URL: %s, %v", redirectTo, err.Error())
+ ctx.RenderWithErr(fmt.Sprintf("Unable to find OpenID provider in %s", redirectTo), tplSignInOpenID, &form)
+ return
+ }
+
+ // Request optional nickname and email info
+ // NOTE: change to `openid.sreg.required` to require it
+ url += "&openid.ns.sreg=http%3A%2F%2Fopenid.net%2Fextensions%2Fsreg%2F1.1"
+ url += "&openid.sreg.optional=nickname%2Cemail"
+
+ log.Trace("Form-passed openid-remember: %t", form.Remember)
+
+ if err := ctx.Session.Set("openid_signin_remember", form.Remember); err != nil {
+ log.Error("SignInOpenIDPost: Could not set openid_signin_remember in session: %v", err)
+ }
+ if err := ctx.Session.Release(); err != nil {
+ log.Error("SignInOpenIDPost: Unable to save changes to the session: %v", err)
+ }
+
+ ctx.Redirect(url)
+}
+
+// signInOpenIDVerify handles response from OpenID provider
+func signInOpenIDVerify(ctx *context.Context) {
+ log.Trace("Incoming call to: %s", ctx.Req.URL.String())
+
+ fullURL := setting.AppURL + ctx.Req.URL.String()[1:]
+ log.Trace("Full URL: %s", fullURL)
+
+ id, err := openid.Verify(fullURL)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+
+ log.Trace("Verified ID: %s", id)
+
+	/* Now look up the user and log them in, or prompt
+	 * them to register if no account is found */
+
+ u, err := user_model.GetUserByOpenID(ctx, id)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+ log.Error("signInOpenIDVerify: %v", err)
+ }
+ if u != nil {
+ log.Trace("User exists, logging in")
+ remember, _ := ctx.Session.Get("openid_signin_remember").(bool)
+ log.Trace("Session stored openid-remember: %t", remember)
+ handleSignIn(ctx, u, remember)
+ return
+ }
+
+ log.Trace("User with openid: %s does not exist, should connect or register", id)
+
+ parsedURL, err := url.Parse(fullURL)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+ values, err := url.ParseQuery(parsedURL.RawQuery)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+ email := values.Get("openid.sreg.email")
+ nickname := values.Get("openid.sreg.nickname")
+
+ log.Trace("User has email=%s and nickname=%s", email, nickname)
+
+ if email != "" {
+ u, err = user_model.GetUserByEmail(ctx, email)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+ log.Error("signInOpenIDVerify: %v", err)
+ }
+ if u != nil {
+ log.Trace("Local user %s has OpenID provided email %s", u.LowerName, email)
+ }
+ }
+
+ if u == nil && nickname != "" {
+		u, err = user_model.GetUserByName(ctx, nickname)
+ if err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ ctx.RenderWithErr(err.Error(), tplSignInOpenID, &forms.SignInOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+ }
+ if u != nil {
+ log.Trace("Local user %s has OpenID provided nickname %s", u.LowerName, nickname)
+ }
+ }
+
+ if u != nil {
+ nickname = u.LowerName
+ }
+ if err := updateSession(ctx, nil, map[string]any{
+ "openid_verified_uri": id,
+ "openid_determined_email": email,
+ "openid_determined_username": nickname,
+ }); err != nil {
+ ctx.ServerError("updateSession", err)
+ return
+ }
+
+ if u != nil || !setting.Service.EnableOpenIDSignUp || setting.Service.AllowOnlyInternalRegistration {
+ ctx.Redirect(setting.AppSubURL + "/user/openid/connect")
+ } else {
+ ctx.Redirect(setting.AppSubURL + "/user/openid/register")
+ }
+}
+
+// ConnectOpenID shows a form to connect an OpenID URI to an existing account
+func ConnectOpenID(ctx *context.Context) {
+ oid, _ := ctx.Session.Get("openid_verified_uri").(string)
+ if oid == "" {
+ ctx.Redirect(setting.AppSubURL + "/user/login/openid")
+ return
+ }
+ ctx.Data["Title"] = "OpenID connect"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsOpenIDConnect"] = true
+ ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp
+ ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
+ ctx.Data["OpenID"] = oid
+ userName, _ := ctx.Session.Get("openid_determined_username").(string)
+ if userName != "" {
+ ctx.Data["user_name"] = userName
+ }
+ ctx.HTML(http.StatusOK, tplConnectOID)
+}
+
+// ConnectOpenIDPost handles submission of a form to connect an OpenID URI to an existing account
+func ConnectOpenIDPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ConnectOpenIDForm)
+ oid, _ := ctx.Session.Get("openid_verified_uri").(string)
+ if oid == "" {
+ ctx.Redirect(setting.AppSubURL + "/user/login/openid")
+ return
+ }
+ ctx.Data["Title"] = "OpenID connect"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsOpenIDConnect"] = true
+ ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp
+ ctx.Data["OpenID"] = oid
+
+ u, _, err := auth.UserSignIn(ctx, form.UserName, form.Password)
+ if err != nil {
+ handleSignInError(ctx, form.UserName, &form, tplConnectOID, "ConnectOpenIDPost", err)
+ return
+ }
+
+ // add OpenID for the user
+ userOID := &user_model.UserOpenID{UID: u.ID, URI: oid}
+ if err = user_model.AddUserOpenID(ctx, userOID); err != nil {
+ if user_model.IsErrOpenIDAlreadyUsed(err) {
+ ctx.RenderWithErr(ctx.Tr("form.openid_been_used", oid), tplConnectOID, &form)
+ return
+ }
+ ctx.ServerError("AddUserOpenID", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.add_openid_success"))
+
+ remember, _ := ctx.Session.Get("openid_signin_remember").(bool)
+ log.Trace("Session stored openid-remember: %t", remember)
+ handleSignIn(ctx, u, remember)
+}
+
+// RegisterOpenID shows a form to create a new user authenticated via an OpenID URI
+func RegisterOpenID(ctx *context.Context) {
+ oid, _ := ctx.Session.Get("openid_verified_uri").(string)
+ if oid == "" {
+ ctx.Redirect(setting.AppSubURL + "/user/login/openid")
+ return
+ }
+ ctx.Data["Title"] = "OpenID signup"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsOpenIDRegister"] = true
+ ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp
+ ctx.Data["AllowOnlyInternalRegistration"] = setting.Service.AllowOnlyInternalRegistration
+ ctx.Data["EnableCaptcha"] = setting.Service.EnableCaptcha
+ ctx.Data["Captcha"] = context.GetImageCaptcha()
+ ctx.Data["CaptchaType"] = setting.Service.CaptchaType
+ ctx.Data["RecaptchaSitekey"] = setting.Service.RecaptchaSitekey
+ ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey
+ ctx.Data["RecaptchaURL"] = setting.Service.RecaptchaURL
+ ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey
+ ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL
+ ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey
+ ctx.Data["OpenID"] = oid
+ userName, _ := ctx.Session.Get("openid_determined_username").(string)
+ if userName != "" {
+ ctx.Data["user_name"] = userName
+ }
+ email, _ := ctx.Session.Get("openid_determined_email").(string)
+ if email != "" {
+ ctx.Data["email"] = email
+ }
+ ctx.HTML(http.StatusOK, tplSignUpOID)
+}
+
+// RegisterOpenIDPost handles submission of a form to create a new user authenticated via an OpenID URI
+func RegisterOpenIDPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.SignUpOpenIDForm)
+ oid, _ := ctx.Session.Get("openid_verified_uri").(string)
+ if oid == "" {
+ ctx.Redirect(setting.AppSubURL + "/user/login/openid")
+ return
+ }
+
+ ctx.Data["Title"] = "OpenID signup"
+ ctx.Data["PageIsSignIn"] = true
+ ctx.Data["PageIsOpenIDRegister"] = true
+ ctx.Data["EnableOpenIDSignUp"] = setting.Service.EnableOpenIDSignUp
+ context.SetCaptchaData(ctx)
+ ctx.Data["OpenID"] = oid
+
+ if setting.Service.AllowOnlyInternalRegistration {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if setting.Service.EnableCaptcha {
+ if err := ctx.Req.ParseForm(); err != nil {
+ ctx.ServerError("", err)
+ return
+ }
+ context.VerifyCaptcha(ctx, tplSignUpOID, form)
+ }
+
+ length := setting.MinPasswordLength
+ if length < 256 {
+ length = 256
+ }
+ password, err := util.CryptoRandomString(int64(length))
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSignUpOID, form)
+ return
+ }
+
+ u := &user_model.User{
+ Name: form.UserName,
+ Email: form.Email,
+ Passwd: password,
+ }
+ if !createUserInContext(ctx, tplSignUpOID, form, u, nil, nil, false) {
+ // error already handled
+ return
+ }
+
+ // add OpenID for the user
+ userOID := &user_model.UserOpenID{UID: u.ID, URI: oid}
+ if err = user_model.AddUserOpenID(ctx, userOID); err != nil {
+ if user_model.IsErrOpenIDAlreadyUsed(err) {
+ ctx.RenderWithErr(ctx.Tr("form.openid_been_used", oid), tplSignUpOID, &form)
+ return
+ }
+ ctx.ServerError("AddUserOpenID", err)
+ return
+ }
+
+ if !handleUserCreated(ctx, u, nil) {
+ // error already handled
+ return
+ }
+
+ remember, _ := ctx.Session.Get("openid_signin_remember").(bool)
+ log.Trace("Session stored openid-remember: %t", remember)
+ handleSignIn(ctx, u, remember)
+}
diff --git a/routers/web/auth/password.go b/routers/web/auth/password.go
new file mode 100644
index 0000000..d25bd68
--- /dev/null
+++ b/routers/web/auth/password.go
@@ -0,0 +1,317 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+var (
+ // tplMustChangePassword template for updating a user's password
+ tplMustChangePassword base.TplName = "user/auth/change_passwd"
+ tplForgotPassword base.TplName = "user/auth/forgot_passwd"
+ tplResetPassword base.TplName = "user/auth/reset_passwd"
+)
+
+// ForgotPasswd render the forget password page
+func ForgotPasswd(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("auth.forgot_password_title")
+
+ if setting.MailService == nil {
+ log.Warn("no mail service configured")
+ ctx.Data["IsResetDisable"] = true
+ ctx.HTML(http.StatusOK, tplForgotPassword)
+ return
+ }
+
+ ctx.Data["Email"] = ctx.FormString("email")
+
+ ctx.Data["IsResetRequest"] = true
+ ctx.HTML(http.StatusOK, tplForgotPassword)
+}
+
+// ForgotPasswdPost response for forget password request
+func ForgotPasswdPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("auth.forgot_password_title")
+
+ if setting.MailService == nil {
+ ctx.NotFound("ForgotPasswdPost", nil)
+ return
+ }
+ ctx.Data["IsResetRequest"] = true
+
+ email := ctx.FormString("email")
+ ctx.Data["Email"] = email
+
+ u, err := user_model.GetUserByEmail(ctx, email)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Data["ResetPwdCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, ctx.Locale)
+ ctx.Data["IsResetSent"] = true
+ ctx.HTML(http.StatusOK, tplForgotPassword)
+ return
+ }
+
+ ctx.ServerError("user.ResetPasswd(check existence)", err)
+ return
+ }
+
+ if !u.IsLocal() && !u.IsOAuth2() {
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.non_local_account"), tplForgotPassword, nil)
+ return
+ }
+
+ if ctx.Cache.IsExist("MailResendLimit_" + u.LowerName) {
+ ctx.Data["ResendLimited"] = true
+ ctx.HTML(http.StatusOK, tplForgotPassword)
+ return
+ }
+
+ mailer.SendResetPasswordMail(u)
+
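+	// Throttle resends: remember this user for 180 seconds so that repeated requests
+	// do not trigger additional mails (checked via MailResendLimit above).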
+ if err = ctx.Cache.Put("MailResendLimit_"+u.LowerName, u.LowerName, 180); err != nil {
+ log.Error("Set cache(MailResendLimit) fail: %v", err)
+ }
+
+ ctx.Data["ResetPwdCodeLives"] = timeutil.MinutesToFriendly(setting.Service.ResetPwdCodeLives, ctx.Locale)
+ ctx.Data["IsResetSent"] = true
+ ctx.HTML(http.StatusOK, tplForgotPassword)
+}
+
+func commonResetPassword(ctx *context.Context) (*user_model.User, *auth.TwoFactor) {
+ code := ctx.FormString("code")
+
+ ctx.Data["Title"] = ctx.Tr("auth.reset_password")
+ ctx.Data["Code"] = code
+
+ if nil != ctx.Doer {
+ ctx.Data["user_signed_in"] = true
+ }
+
+ if len(code) == 0 {
+ ctx.Flash.Error(ctx.Tr("auth.invalid_code_forgot_password", fmt.Sprintf("%s/user/forgot_password", setting.AppSubURL)), true)
+ return nil, nil
+ }
+
+ // Fail early, don't frustrate the user
+ u := user_model.VerifyUserActiveCode(ctx, code)
+ if u == nil {
+ ctx.Flash.Error(ctx.Tr("auth.invalid_code_forgot_password", fmt.Sprintf("%s/user/forgot_password", setting.AppSubURL)), true)
+ return nil, nil
+ }
+
+ twofa, err := auth.GetTwoFactorByUID(ctx, u.ID)
+ if err != nil {
+ if !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.Error(http.StatusInternalServerError, "CommonResetPassword", err.Error())
+ return nil, nil
+ }
+ } else {
+ ctx.Data["has_two_factor"] = true
+ ctx.Data["scratch_code"] = ctx.FormBool("scratch_code")
+ }
+
+ // Show the user that they are affecting the account that they intended to
+ ctx.Data["user_email"] = u.Email
+
+ if nil != ctx.Doer && u.ID != ctx.Doer.ID {
+ ctx.Flash.Error(ctx.Tr("auth.reset_password_wrong_user", ctx.Doer.Email, u.Email), true)
+ return nil, nil
+ }
+
+ return u, twofa
+}
+
+// ResetPasswd render the account recovery page
+func ResetPasswd(ctx *context.Context) {
+ ctx.Data["IsResetForm"] = true
+
+ commonResetPassword(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplResetPassword)
+}
+
+// ResetPasswdPost response from account recovery request
+func ResetPasswdPost(ctx *context.Context) {
+ u, twofa := commonResetPassword(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if u == nil {
+ // Flash error has been set
+ ctx.HTML(http.StatusOK, tplResetPassword)
+ return
+ }
+
+ // Handle two-factor
+ regenerateScratchToken := false
+ if twofa != nil {
+ if ctx.FormBool("scratch_code") {
+ if !twofa.VerifyScratchToken(ctx.FormString("token")) {
+ ctx.Data["IsResetForm"] = true
+ ctx.Data["Err_Token"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.twofa_scratch_token_incorrect"), tplResetPassword, nil)
+ return
+ }
+ regenerateScratchToken = true
+ } else {
+ passcode := ctx.FormString("passcode")
+ ok, err := twofa.ValidateTOTP(passcode)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "ValidateTOTP", err.Error())
+ return
+ }
+ if !ok || twofa.LastUsedPasscode == passcode {
+ ctx.Data["IsResetForm"] = true
+ ctx.Data["Err_Passcode"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.twofa_passcode_incorrect"), tplResetPassword, nil)
+ return
+ }
+
+ twofa.LastUsedPasscode = passcode
+ if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
+ ctx.ServerError("ResetPasswdPost: UpdateTwoFactor", err)
+ return
+ }
+ }
+ }
+
+ opts := &user_service.UpdateAuthOptions{
+ Password: optional.Some(ctx.FormString("password")),
+ MustChangePassword: optional.Some(false),
+ }
+ if err := user_service.UpdateAuth(ctx, u, opts); err != nil {
+ ctx.Data["IsResetForm"] = true
+ ctx.Data["Err_Password"] = true
+ switch {
+ case errors.Is(err, password.ErrMinLength):
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplResetPassword, nil)
+ case errors.Is(err, password.ErrComplexity):
+ ctx.RenderWithErr(password.BuildComplexityError(ctx.Locale), tplResetPassword, nil)
+ case errors.Is(err, password.ErrIsPwned):
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords"), tplResetPassword, nil)
+ case password.IsErrIsPwnedRequest(err):
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned_err"), tplResetPassword, nil)
+ default:
+ ctx.ServerError("UpdateAuth", err)
+ }
+ return
+ }
+
+ log.Trace("User password reset: %s", u.Name)
+ ctx.Data["IsResetFailed"] = true
+ remember := len(ctx.FormString("remember")) != 0
+
+ if regenerateScratchToken {
+ // Invalidate the scratch token.
+ _, err := twofa.GenerateScratchToken()
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ handleSignInFull(ctx, u, remember, false)
+ if ctx.Written() {
+ return
+ }
+ ctx.Flash.Info(ctx.Tr("auth.twofa_scratch_used"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+ return
+ }
+
+ handleSignIn(ctx, u, remember)
+}
+
+// MustChangePassword renders the page to change a user's password
+func MustChangePassword(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("auth.must_change_password")
+ ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/settings/change_password"
+ ctx.Data["MustChangePassword"] = true
+ ctx.HTML(http.StatusOK, tplMustChangePassword)
+}
+
+// MustChangePasswordPost response for updating a user's password after their
+// account was created by an admin
+func MustChangePasswordPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.MustChangePasswordForm)
+ ctx.Data["Title"] = ctx.Tr("auth.must_change_password")
+ ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/settings/change_password"
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplMustChangePassword)
+ return
+ }
+
+	// Make sure that only requests from users who are eligible to change their
+	// password via this method pass through
+ if !ctx.Doer.MustChangePassword {
+ ctx.ServerError("MustUpdatePassword", errors.New("cannot update password. Please visit the settings page"))
+ return
+ }
+
+ if form.Password != form.Retype {
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("form.password_not_match"), tplMustChangePassword, &form)
+ return
+ }
+
+ opts := &user_service.UpdateAuthOptions{
+ Password: optional.Some(form.Password),
+ MustChangePassword: optional.Some(false),
+ }
+ if err := user_service.UpdateAuth(ctx, ctx.Doer, opts); err != nil {
+ switch {
+ case errors.Is(err, password.ErrMinLength):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_too_short", setting.MinPasswordLength), tplMustChangePassword, &form)
+ case errors.Is(err, password.ErrComplexity):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(password.BuildComplexityError(ctx.Locale), tplMustChangePassword, &form)
+ case errors.Is(err, password.ErrIsPwned):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords"), tplMustChangePassword, &form)
+ case password.IsErrIsPwnedRequest(err):
+ ctx.Data["Err_Password"] = true
+ ctx.RenderWithErr(ctx.Tr("auth.password_pwned_err"), tplMustChangePassword, &form)
+ default:
+ ctx.ServerError("UpdateAuth", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.change_password_success"))
+
+ log.Trace("User updated password: %s", ctx.Doer.Name)
+
+ redirectTo := ctx.GetSiteCookie("redirect_to")
+ if redirectTo != "" {
+ middleware.DeleteRedirectToCookie(ctx.Resp)
+ }
+ ctx.RedirectToFirst(redirectTo)
+}
diff --git a/routers/web/auth/webauthn.go b/routers/web/auth/webauthn.go
new file mode 100644
index 0000000..5c93c14
--- /dev/null
+++ b/routers/web/auth/webauthn.go
@@ -0,0 +1,177 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package auth
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+ wa "code.gitea.io/gitea/modules/auth/webauthn"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/externalaccount"
+
+ "github.com/go-webauthn/webauthn/protocol"
+ "github.com/go-webauthn/webauthn/webauthn"
+)
+
+var tplWebAuthn base.TplName = "user/auth/webauthn"
+
+// WebAuthn shows the WebAuthn login page
+func WebAuthn(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("twofa")
+
+ if CheckAutoLogin(ctx) {
+ return
+ }
+
+ // Ensure user is in a 2FA session.
+ if ctx.Session.Get("twofaUid") == nil {
+ ctx.ServerError("UserSignIn", errors.New("not in WebAuthn session"))
+ return
+ }
+
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, ctx.Session.Get("twofaUid").(int64))
+ if err != nil {
+ ctx.ServerError("HasTwoFactorByUID", err)
+ return
+ }
+
+ ctx.Data["HasTwoFactor"] = hasTwoFactor
+
+ ctx.HTML(http.StatusOK, tplWebAuthn)
+}
+
+// WebAuthnLoginAssertion submits a WebAuthn challenge to the browser
+func WebAuthnLoginAssertion(ctx *context.Context) {
+ // Ensure user is in a WebAuthn session.
+ idSess, ok := ctx.Session.Get("twofaUid").(int64)
+ if !ok || idSess == 0 {
+ ctx.ServerError("UserSignIn", errors.New("not in WebAuthn session"))
+ return
+ }
+
+ user, err := user_model.GetUserByID(ctx, idSess)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ exists, err := auth.ExistsWebAuthnCredentialsForUID(ctx, user.ID)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+ if !exists {
+ ctx.ServerError("UserSignIn", errors.New("no device registered"))
+ return
+ }
+
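+	// BeginLogin creates the assertion options (including the challenge) for the
+	// browser, plus the session data that must be kept server-side for validation.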
+ assertion, sessionData, err := wa.WebAuthn.BeginLogin((*wa.User)(user))
+ if err != nil {
+ ctx.ServerError("webauthn.BeginLogin", err)
+ return
+ }
+
+ if err := ctx.Session.Set("webauthnAssertion", sessionData); err != nil {
+ ctx.ServerError("Session.Set", err)
+ return
+ }
+ ctx.JSON(http.StatusOK, assertion)
+}
+
+// WebAuthnLoginAssertionPost validates the signature and logs the user in
+func WebAuthnLoginAssertionPost(ctx *context.Context) {
+ idSess, ok := ctx.Session.Get("twofaUid").(int64)
+ sessionData, okData := ctx.Session.Get("webauthnAssertion").(*webauthn.SessionData)
+ if !ok || !okData || sessionData == nil || idSess == 0 {
+ ctx.ServerError("UserSignIn", errors.New("not in WebAuthn session"))
+ return
+ }
+ defer func() {
+ _ = ctx.Session.Delete("webauthnAssertion")
+ }()
+
+ // Load the user from the db
+ user, err := user_model.GetUserByID(ctx, idSess)
+ if err != nil {
+ ctx.ServerError("UserSignIn", err)
+ return
+ }
+
+ log.Trace("Finishing webauthn authentication with user: %s", user.Name)
+
+	// Now we do the equivalent of webauthn.FinishLogin using a combination of our
+	// session data (from webauthnAssertion) and verify the provided request.
+ parsedResponse, err := protocol.ParseCredentialRequestResponse(ctx.Req)
+ if err != nil {
+ // Failed authentication attempt.
+ log.Info("Failed authentication attempt for %s from %s: %v", user.Name, ctx.RemoteAddr(), err)
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ dbCred, err := auth.GetWebAuthnCredentialByCredID(ctx, user.ID, parsedResponse.RawID)
+ if err != nil {
+ ctx.ServerError("GetWebAuthnCredentialByCredID", err)
+ return
+ }
+
+	// If the credential is a legacy one that lacks the backup flags, adopt them from
+	// this response; the specification mandates these flags don't change afterwards.
+ if dbCred.Legacy {
+ dbCred.BackupEligible = parsedResponse.Response.AuthenticatorData.Flags.HasBackupEligible()
+ dbCred.BackupState = parsedResponse.Response.AuthenticatorData.Flags.HasBackupState()
+ dbCred.Legacy = false
+
+ if err := dbCred.UpdateFromLegacy(ctx); err != nil {
+ ctx.ServerError("UpdateFromLegacy", err)
+ return
+ }
+ }
+
+ // Validate the parsed response.
+ cred, err := wa.WebAuthn.ValidateLogin((*wa.User)(user), *sessionData, parsedResponse)
+ if err != nil {
+ // Failed authentication attempt.
+ log.Info("Failed authentication attempt for %s from %s: %v", user.Name, ctx.RemoteAddr(), err)
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ // Ensure that the credential wasn't cloned by checking if CloneWarning is set.
+ // (This is set if the sign counter is less than the one we have stored.)
+ if cred.Authenticator.CloneWarning {
+ log.Info("Failed authentication attempt for %s from %s: cloned credential", user.Name, ctx.RemoteAddr())
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ dbCred.SignCount = cred.Authenticator.SignCount
+ if err := dbCred.UpdateSignCount(ctx); err != nil {
+ ctx.ServerError("UpdateSignCount", err)
+ return
+ }
+
+ // Now handle account linking if that's requested
+ if ctx.Session.Get("linkAccount") != nil {
+ if err := externalaccount.LinkAccountFromStore(ctx, ctx.Session, user); err != nil {
+ ctx.ServerError("LinkAccountFromStore", err)
+ return
+ }
+ }
+
+ remember := ctx.Session.Get("twofaRemember").(bool)
+ redirect := handleSignInFull(ctx, user, remember, false)
+ if redirect == "" {
+ redirect = setting.AppSubURL + "/"
+ }
+ _ = ctx.Session.Delete("twofaUid")
+
+ ctx.JSONRedirect(redirect)
+}
diff --git a/routers/web/base.go b/routers/web/base.go
new file mode 100644
index 0000000..78dde57
--- /dev/null
+++ b/routers/web/base.go
@@ -0,0 +1,98 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "os"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web/routing"
+)
+
+func storageHandler(storageSetting *setting.Storage, prefix string, objStore storage.ObjectStorage) http.HandlerFunc {
+ prefix = strings.Trim(prefix, "/")
+ funcInfo := routing.GetFuncInfo(storageHandler, prefix)
+
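+	// When serve-direct is enabled, redirect clients to a URL generated by the object
+	// storage (e.g. a pre-signed MinIO URL) instead of proxying the file ourselves.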
+ if storageSetting.MinioConfig.ServeDirect {
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ if req.Method != "GET" && req.Method != "HEAD" {
+ http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
+ return
+ }
+
+ if !strings.HasPrefix(req.URL.Path, "/"+prefix+"/") {
+ http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+ return
+ }
+ routing.UpdateFuncInfo(req.Context(), funcInfo)
+
+ rPath := strings.TrimPrefix(req.URL.Path, "/"+prefix+"/")
+ rPath = util.PathJoinRelX(rPath)
+
+ u, err := objStore.URL(rPath, path.Base(rPath))
+ if err != nil {
+ if os.IsNotExist(err) || errors.Is(err, os.ErrNotExist) {
+ log.Warn("Unable to find %s %s", prefix, rPath)
+ http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+ return
+ }
+ log.Error("Error whilst getting URL for %s %s. Error: %v", prefix, rPath, err)
+ http.Error(w, fmt.Sprintf("Error whilst getting URL for %s %s", prefix, rPath), http.StatusInternalServerError)
+ return
+ }
+
+ http.Redirect(w, req, u.String(), http.StatusTemporaryRedirect)
+ })
+ }
+
+ return http.HandlerFunc(func(w http.ResponseWriter, req *http.Request) {
+ if req.Method != "GET" && req.Method != "HEAD" {
+ http.Error(w, http.StatusText(http.StatusMethodNotAllowed), http.StatusMethodNotAllowed)
+ return
+ }
+
+ if !strings.HasPrefix(req.URL.Path, "/"+prefix+"/") {
+ http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+ return
+ }
+ routing.UpdateFuncInfo(req.Context(), funcInfo)
+
+ rPath := strings.TrimPrefix(req.URL.Path, "/"+prefix+"/")
+ rPath = util.PathJoinRelX(rPath)
+ if rPath == "" || rPath == "." {
+ http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+ return
+ }
+
+ fi, err := objStore.Stat(rPath)
+ if err != nil {
+ if os.IsNotExist(err) || errors.Is(err, os.ErrNotExist) {
+ log.Warn("Unable to find %s %s", prefix, rPath)
+ http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
+ return
+ }
+ log.Error("Error whilst opening %s %s. Error: %v", prefix, rPath, err)
+ http.Error(w, fmt.Sprintf("Error whilst opening %s %s", prefix, rPath), http.StatusInternalServerError)
+ return
+ }
+
+ fr, err := objStore.Open(rPath)
+ if err != nil {
+ log.Error("Error whilst opening %s %s. Error: %v", prefix, rPath, err)
+ http.Error(w, fmt.Sprintf("Error whilst opening %s %s", prefix, rPath), http.StatusInternalServerError)
+ return
+ }
+ defer fr.Close()
+ httpcache.ServeContentWithCacheControl(w, req, path.Base(rPath), fi.ModTime(), fr)
+ })
+}
diff --git a/routers/web/devtest/devtest.go b/routers/web/devtest/devtest.go
new file mode 100644
index 0000000..dd20663
--- /dev/null
+++ b/routers/web/devtest/devtest.go
@@ -0,0 +1,66 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package devtest
+
+import (
+ "net/http"
+ "path"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/services/context"
+)
+
+// List renders the list of all devtest templates; they are used for e2e tests of the UI components
+func List(ctx *context.Context) {
+ templateNames, err := templates.AssetFS().ListFiles("devtest", true)
+ if err != nil {
+ ctx.ServerError("AssetFS().ListFiles", err)
+ return
+ }
+ var subNames []string
+ for _, tmplName := range templateNames {
+ subName := strings.TrimSuffix(tmplName, ".tmpl")
+ if subName != "list" {
+ subNames = append(subNames, subName)
+ }
+ }
+ ctx.Data["SubNames"] = subNames
+ ctx.HTML(http.StatusOK, "devtest/list")
+}
+
+func FetchActionTest(ctx *context.Context) {
+ _ = ctx.Req.ParseForm()
+ ctx.Flash.Info("fetch-action: " + ctx.Req.Method + " " + ctx.Req.RequestURI + "<br>" +
+ "Form: " + ctx.Req.Form.Encode() + "<br>" +
+ "PostForm: " + ctx.Req.PostForm.Encode(),
+ )
+ time.Sleep(2 * time.Second)
+ ctx.JSONRedirect("")
+}
+
+func Tmpl(ctx *context.Context) {
+ now := time.Now()
+ ctx.Data["TimeNow"] = now
+ ctx.Data["TimePast5s"] = now.Add(-5 * time.Second)
+ ctx.Data["TimeFuture5s"] = now.Add(5 * time.Second)
+ ctx.Data["TimePast2m"] = now.Add(-2 * time.Minute)
+ ctx.Data["TimeFuture2m"] = now.Add(2 * time.Minute)
+ ctx.Data["TimePast1y"] = now.Add(-1 * 366 * 86400 * time.Second)
+ ctx.Data["TimeFuture1y"] = now.Add(1 * 366 * 86400 * time.Second)
+
+ if ctx.Req.Method == "POST" {
+ _ = ctx.Req.ParseForm()
+ ctx.Flash.Info("form: "+ctx.Req.Method+" "+ctx.Req.RequestURI+"<br>"+
+ "Form: "+ctx.Req.Form.Encode()+"<br>"+
+ "PostForm: "+ctx.Req.PostForm.Encode(),
+ true,
+ )
+ time.Sleep(2 * time.Second)
+ }
+
+ ctx.HTML(http.StatusOK, base.TplName("devtest"+path.Clean("/"+ctx.Params("sub"))))
+}
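
The path.Clean("/"+...) idiom at the end of Tmpl is what keeps a crafted "sub" parameter from escaping the devtest template prefix. A small standalone illustration (the input values are made up):

package main

import (
	"fmt"
	"path"
)

func main() {
	// path.Clean("/"+sub) collapses ".." segments, so the resulting template
	// name always stays under the "devtest" prefix.
	fmt.Println("devtest" + path.Clean("/"+"fetch-action")) // devtest/fetch-action
	fmt.Println("devtest" + path.Clean("/"+"../../secret")) // devtest/secret
}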
diff --git a/routers/web/events/events.go b/routers/web/events/events.go
new file mode 100644
index 0000000..52f20e0
--- /dev/null
+++ b/routers/web/events/events.go
@@ -0,0 +1,122 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package events
+
+import (
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/modules/eventsource"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/routers/web/auth"
+ "code.gitea.io/gitea/services/context"
+)
+
+// Events listens for events
+func Events(ctx *context.Context) {
+ // FIXME: Need to check if resp is actually a http.Flusher! - how though?
+
+ // Set the headers related to event streaming.
+ ctx.Resp.Header().Set("Content-Type", "text/event-stream")
+ ctx.Resp.Header().Set("Cache-Control", "no-cache")
+ ctx.Resp.Header().Set("Connection", "keep-alive")
+ ctx.Resp.Header().Set("X-Accel-Buffering", "no")
+ ctx.Resp.WriteHeader(http.StatusOK)
+
+ if !ctx.IsSigned {
+ // Return unauthorized status event
+ event := &eventsource.Event{
+ Name: "close",
+ Data: "unauthorized",
+ }
+ _, _ = event.WriteTo(ctx)
+ ctx.Resp.Flush()
+ return
+ }
+
+ // Listen to connection close and un-register messageChan
+ notify := ctx.Done()
+ ctx.Resp.Flush()
+
+ shutdownCtx := graceful.GetManager().ShutdownContext()
+
+ uid := ctx.Doer.ID
+
+ messageChan := eventsource.GetManager().Register(uid)
+
+ unregister := func() {
+ eventsource.GetManager().Unregister(uid, messageChan)
+		// drain messageChan until the manager closes it
+ for {
+ _, ok := <-messageChan
+ if !ok {
+ break
+ }
+ }
+ }
+
+ if _, err := ctx.Resp.Write([]byte("\n")); err != nil {
+ log.Error("Unable to write to EventStream: %v", err)
+ unregister()
+ return
+ }
+
+ timer := time.NewTicker(30 * time.Second)
+
+loop:
+ for {
+ select {
+ case <-timer.C:
+ event := &eventsource.Event{
+ Name: "ping",
+ }
+ _, err := event.WriteTo(ctx.Resp)
+ if err != nil {
+ log.Error("Unable to write to EventStream for user %s: %v", ctx.Doer.Name, err)
+ go unregister()
+ break loop
+ }
+ ctx.Resp.Flush()
+ case <-notify:
+ go unregister()
+ break loop
+ case <-shutdownCtx.Done():
+ go unregister()
+ break loop
+ case event, ok := <-messageChan:
+ if !ok {
+ break loop
+ }
+
+ // Handle logout
+ if event.Name == "logout" {
+ if ctx.Session.ID() == event.Data {
+ _, _ = (&eventsource.Event{
+ Name: "logout",
+ Data: "here",
+ }).WriteTo(ctx.Resp)
+ ctx.Resp.Flush()
+ go unregister()
+ auth.HandleSignOut(ctx)
+ break loop
+ }
+ // Replace the event - we don't want to expose the session ID to the user
+ event = &eventsource.Event{
+ Name: "logout",
+ Data: "elsewhere",
+ }
+ }
+
+ _, err := event.WriteTo(ctx.Resp)
+ if err != nil {
+ log.Error("Unable to write to EventStream for user %s: %v", ctx.Doer.Name, err)
+ go unregister()
+ break loop
+ }
+ ctx.Resp.Flush()
+ }
+ }
+ timer.Stop()
+}
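
The endpoint above speaks the standard Server-Sent Events wire format: each event is written as "event: <name>" and "data: <payload>" lines terminated by a blank line. A minimal client sketch follows; the /user/events path and cookie-based authentication are assumptions for the example, not taken from this patch.

package main

import (
	"bufio"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	req, err := http.NewRequest(http.MethodGet, "https://forgejo.example.com/user/events", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Accept", "text/event-stream")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Print only the SSE field lines ("event: ping", "data: ...", and so on).
	scanner := bufio.NewScanner(resp.Body)
	for scanner.Scan() {
		line := scanner.Text()
		if strings.HasPrefix(line, "event:") || strings.HasPrefix(line, "data:") {
			fmt.Println(line)
		}
	}
}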
diff --git a/routers/web/explore/code.go b/routers/web/explore/code.go
new file mode 100644
index 0000000..f61b832
--- /dev/null
+++ b/routers/web/explore/code.go
@@ -0,0 +1,144 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/base"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // tplExploreCode explore code page template
+ tplExploreCode base.TplName = "explore/code"
+)
+
+// Code renders the explore code page
+func Code(ctx *context.Context) {
+ if !setting.Indexer.RepoIndexerEnabled {
+ ctx.Redirect(setting.AppSubURL + "/explore")
+ return
+ }
+
+ ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["Title"] = ctx.Tr("explore")
+ ctx.Data["PageIsExplore"] = true
+ ctx.Data["PageIsExploreCode"] = true
+
+ language := ctx.FormTrim("l")
+ keyword := ctx.FormTrim("q")
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Language"] = language
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["PageIsViewCode"] = true
+
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplExploreCode)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ var (
+ repoIDs []int64
+ err error
+ isAdmin bool
+ )
+ if ctx.Doer != nil {
+ isAdmin = ctx.Doer.IsAdmin
+ }
+
+ // guest user or non-admin user
+ if ctx.Doer == nil || !isAdmin {
+ repoIDs, err = repo_model.FindUserCodeAccessibleRepoIDs(ctx, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("FindUserCodeAccessibleRepoIDs", err)
+ return
+ }
+ }
+
+ var (
+ total int
+ searchResults []*code_indexer.Result
+ searchResultLanguages []*code_indexer.SearchResultLanguages
+ )
+
+ if (len(repoIDs) > 0) || isAdmin {
+ total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, &code_indexer.SearchOptions{
+ RepoIDs: repoIDs,
+ Keyword: keyword,
+ IsKeywordFuzzy: isFuzzy,
+ Language: language,
+ Paginator: &db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.RepoSearchPagingNum,
+ },
+ })
+ if err != nil {
+ if code_indexer.IsAvailable(ctx) {
+ ctx.ServerError("SearchResults", err)
+ return
+ }
+ ctx.Data["CodeIndexerUnavailable"] = true
+ } else {
+ ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+ }
+
+ loadRepoIDs := make([]int64, 0, len(searchResults))
+ for _, result := range searchResults {
+ var find bool
+ for _, id := range loadRepoIDs {
+ if id == result.RepoID {
+ find = true
+ break
+ }
+ }
+ if !find {
+ loadRepoIDs = append(loadRepoIDs, result.RepoID)
+ }
+ }
+
+ repoMaps, err := repo_model.GetRepositoriesMapByIDs(ctx, loadRepoIDs)
+ if err != nil {
+ ctx.ServerError("GetRepositoriesMapByIDs", err)
+ return
+ }
+
+ ctx.Data["RepoMaps"] = repoMaps
+
+ if len(loadRepoIDs) != len(repoMaps) {
+ // Remove deleted repos from search results
+ cleanedSearchResults := make([]*code_indexer.Result, 0, len(repoMaps))
+ for _, sr := range searchResults {
+ if _, found := repoMaps[sr.RepoID]; found {
+ cleanedSearchResults = append(cleanedSearchResults, sr)
+ }
+ }
+
+ searchResults = cleanedSearchResults
+ }
+ }
+
+ ctx.Data["SearchResults"] = searchResults
+ ctx.Data["SearchResultLanguages"] = searchResultLanguages
+
+ pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "l", "Language")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplExploreCode)
+}
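
The loadRepoIDs de-duplication above is quadratic in the number of search results. A behaviorally equivalent set-based variant (a sketch only, standard library, meant to stand in for the nested loop inside this function):

	seen := make(map[int64]struct{}, len(searchResults))
	loadRepoIDs := make([]int64, 0, len(searchResults))
	for _, result := range searchResults {
		if _, ok := seen[result.RepoID]; !ok {
			seen[result.RepoID] = struct{}{}
			loadRepoIDs = append(loadRepoIDs, result.RepoID)
		}
	}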
diff --git a/routers/web/explore/org.go b/routers/web/explore/org.go
new file mode 100644
index 0000000..f8fd6ec
--- /dev/null
+++ b/routers/web/explore/org.go
@@ -0,0 +1,48 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/context"
+)
+
+// Organizations renders the explore organizations page
+func Organizations(ctx *context.Context) {
+ ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage
+ ctx.Data["Title"] = ctx.Tr("explore")
+ ctx.Data["PageIsExplore"] = true
+ ctx.Data["PageIsExploreOrganizations"] = true
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ visibleTypes := []structs.VisibleType{structs.VisibleTypePublic}
+ if ctx.Doer != nil {
+ visibleTypes = append(visibleTypes, structs.VisibleTypeLimited, structs.VisibleTypePrivate)
+ }
+
+ supportedSortOrders := container.SetOf(
+ "newest",
+ "oldest",
+ "alphabetically",
+ "reversealphabetically",
+ )
+ sortOrder := ctx.FormString("sort")
+ if sortOrder == "" {
+ sortOrder = "newest"
+ ctx.SetFormString("sort", sortOrder)
+ }
+
+ RenderUserSearch(ctx, &user_model.SearchUserOptions{
+ Actor: ctx.Doer,
+ Type: user_model.UserTypeOrganization,
+ ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},
+ Visible: visibleTypes,
+
+ SupportedSortOrders: supportedSortOrders,
+ }, tplExploreUsers)
+}
diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go
new file mode 100644
index 0000000..116b983
--- /dev/null
+++ b/routers/web/explore/repo.go
@@ -0,0 +1,193 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sitemap"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // tplExploreRepos explore repositories page template
+ tplExploreRepos base.TplName = "explore/repos"
+ relevantReposOnlyParam string = "only_show_relevant"
+)
+
+// RepoSearchOptions holds the options used when searching repositories
+type RepoSearchOptions struct {
+ OwnerID int64
+ Private bool
+ Restricted bool
+ PageSize int
+ OnlyShowRelevant bool
+ TplName base.TplName
+}
+
+// RenderRepoSearch renders the repositories search page.
+// This function is also used to render the Admin Repository Management page.
+func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) {
+ // Sitemap index for sitemap paths
+ page := int(ctx.ParamsInt64("idx"))
+ isSitemap := ctx.Params("idx") != ""
+ if page <= 1 {
+ page = ctx.FormInt("page")
+ }
+
+ if page <= 0 {
+ page = 1
+ }
+
+ if isSitemap {
+ opts.PageSize = setting.UI.SitemapPagingNum
+ }
+
+ var (
+ repos []*repo_model.Repository
+ count int64
+ err error
+ orderBy db.SearchOrderBy
+ )
+
+ sortOrder := ctx.FormString("sort")
+ if sortOrder == "" {
+ sortOrder = setting.UI.ExploreDefaultSort
+ }
+
+ if order, ok := repo_model.OrderByFlatMap[sortOrder]; ok {
+ orderBy = order
+ } else {
+ sortOrder = "recentupdate"
+ orderBy = db.SearchOrderByRecentUpdated
+ }
+ ctx.Data["SortType"] = sortOrder
+
+ keyword := ctx.FormTrim("q")
+
+ ctx.Data["OnlyShowRelevant"] = opts.OnlyShowRelevant
+
+ topicOnly := ctx.FormBool("topic")
+ ctx.Data["TopicOnly"] = topicOnly
+
+ language := ctx.FormTrim("language")
+ ctx.Data["Language"] = language
+
+ archived := ctx.FormOptionalBool("archived")
+ ctx.Data["IsArchived"] = archived
+
+ fork := ctx.FormOptionalBool("fork")
+ ctx.Data["IsFork"] = fork
+
+ mirror := ctx.FormOptionalBool("mirror")
+ ctx.Data["IsMirror"] = mirror
+
+ template := ctx.FormOptionalBool("template")
+ ctx.Data["IsTemplate"] = template
+
+ private := ctx.FormOptionalBool("private")
+ ctx.Data["IsPrivate"] = private
+
+ repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: opts.PageSize,
+ },
+ Actor: ctx.Doer,
+ OrderBy: orderBy,
+ Private: opts.Private,
+ Keyword: keyword,
+ OwnerID: opts.OwnerID,
+ AllPublic: true,
+ AllLimited: true,
+ TopicOnly: topicOnly,
+ Language: language,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ OnlyShowRelevant: opts.OnlyShowRelevant,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+ if isSitemap {
+ m := sitemap.NewSitemap()
+ for _, item := range repos {
+ m.Add(sitemap.URL{URL: item.HTMLURL(), LastMod: item.UpdatedUnix.AsTimePtr()})
+ }
+ ctx.Resp.Header().Set("Content-Type", "text/xml")
+ if _, err := m.WriteTo(ctx.Resp); err != nil {
+ log.Error("Failed writing sitemap: %v", err)
+ }
+ return
+ }
+
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Total"] = count
+ ctx.Data["Repos"] = repos
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ pager := context.NewPagination(int(count), opts.PageSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "topic", "TopicOnly")
+ pager.AddParam(ctx, "language", "Language")
+ pager.AddParamString(relevantReposOnlyParam, fmt.Sprint(opts.OnlyShowRelevant))
+ if archived.Has() {
+ pager.AddParamString("archived", fmt.Sprint(archived.Value()))
+ }
+ if fork.Has() {
+ pager.AddParamString("fork", fmt.Sprint(fork.Value()))
+ }
+ if mirror.Has() {
+ pager.AddParamString("mirror", fmt.Sprint(mirror.Value()))
+ }
+ if template.Has() {
+ pager.AddParamString("template", fmt.Sprint(template.Value()))
+ }
+ if private.Has() {
+ pager.AddParamString("private", fmt.Sprint(private.Value()))
+ }
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, opts.TplName)
+}
+
+// Repos renders the explore repositories page
+func Repos(ctx *context.Context) {
+ ctx.Data["UsersIsDisabled"] = setting.Service.Explore.DisableUsersPage
+ ctx.Data["Title"] = ctx.Tr("explore")
+ ctx.Data["PageIsExplore"] = true
+ ctx.Data["PageIsExploreRepositories"] = true
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ var ownerID int64
+ if ctx.Doer != nil && !ctx.Doer.IsAdmin {
+ ownerID = ctx.Doer.ID
+ }
+
+ onlyShowRelevant := setting.UI.OnlyShowRelevantRepos
+
+ _ = ctx.Req.ParseForm() // parse the form first, to prepare the ctx.Req.Form field
+ if len(ctx.Req.Form[relevantReposOnlyParam]) != 0 {
+ onlyShowRelevant = ctx.FormBool(relevantReposOnlyParam)
+ }
+
+ RenderRepoSearch(ctx, &RepoSearchOptions{
+ PageSize: setting.UI.ExplorePagingNum,
+ OwnerID: ownerID,
+ Private: ctx.Doer != nil,
+ TplName: tplExploreRepos,
+ OnlyShowRelevant: onlyShowRelevant,
+ })
+}
diff --git a/routers/web/explore/topic.go b/routers/web/explore/topic.go
new file mode 100644
index 0000000..95fecfe
--- /dev/null
+++ b/routers/web/explore/topic.go
@@ -0,0 +1,41 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+)
+
+// TopicSearch searches for topics, used when creating a topic
+func TopicSearch(ctx *context.Context) {
+ opts := &repo_model.FindTopicOptions{
+ Keyword: ctx.FormString("q"),
+ ListOptions: db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ }
+
+ topics, total, err := repo_model.FindTopics(ctx, opts)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ topicResponses := make([]*api.TopicResponse, len(topics))
+ for i, topic := range topics {
+ topicResponses[i] = convert.ToTopicResponse(topic)
+ }
+
+ ctx.SetTotalCountHeader(total)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "topics": topicResponses,
+ })
+}
diff --git a/routers/web/explore/user.go b/routers/web/explore/user.go
new file mode 100644
index 0000000..b79a79f
--- /dev/null
+++ b/routers/web/explore/user.go
@@ -0,0 +1,163 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "bytes"
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sitemap"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // tplExploreUsers explore users page template
+ tplExploreUsers base.TplName = "explore/users"
+)
+
+var nullByte = []byte{0x00}
+
+func isKeywordValid(keyword string) bool {
+ return !bytes.Contains([]byte(keyword), nullByte)
+}
+
+// RenderUserSearch renders the user search page
+func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions, tplName base.TplName) {
+ // Sitemap index for sitemap paths
+ opts.Page = int(ctx.ParamsInt64("idx"))
+ isSitemap := ctx.Params("idx") != ""
+ if opts.Page <= 1 {
+ opts.Page = ctx.FormInt("page")
+ }
+ if opts.Page <= 1 {
+ opts.Page = 1
+ }
+
+ if isSitemap {
+ opts.PageSize = setting.UI.SitemapPagingNum
+ }
+
+ var (
+ users []*user_model.User
+ count int64
+ err error
+ orderBy db.SearchOrderBy
+ )
+
+	// we cannot set orderBy to `models.SearchOrderByXxx` because there may be a JOIN in the statement and different tables may have columns with the same name
+
+ sortOrder := ctx.FormString("sort")
+ if sortOrder == "" {
+ sortOrder = setting.UI.ExploreDefaultSort
+ }
+ ctx.Data["SortType"] = sortOrder
+
+ switch sortOrder {
+ case "newest":
+ orderBy = "`user`.id DESC"
+ case "oldest":
+ orderBy = "`user`.id ASC"
+ case "leastupdate":
+ orderBy = "`user`.updated_unix ASC"
+ case "reversealphabetically":
+ orderBy = "`user`.name DESC"
+ case "lastlogin":
+ orderBy = "`user`.last_login_unix ASC"
+ case "reverselastlogin":
+ orderBy = "`user`.last_login_unix DESC"
+ case "alphabetically":
+ orderBy = "`user`.name ASC"
+ case "recentupdate":
+ fallthrough
+ default:
+ // in case the sortType is not valid, we set it to recentupdate
+ sortOrder = "recentupdate"
+ ctx.Data["SortType"] = "recentupdate"
+ orderBy = "`user`.updated_unix DESC"
+ }
+
+ if opts.SupportedSortOrders != nil && !opts.SupportedSortOrders.Contains(sortOrder) {
+ ctx.NotFound("unsupported sort order", nil)
+ return
+ }
+
+ opts.Keyword = ctx.FormTrim("q")
+ opts.OrderBy = orderBy
+ if len(opts.Keyword) == 0 || isKeywordValid(opts.Keyword) {
+ users, count, err = user_model.SearchUsers(ctx, opts)
+ if err != nil {
+ ctx.ServerError("SearchUsers", err)
+ return
+ }
+ }
+ if isSitemap {
+ m := sitemap.NewSitemap()
+ for _, item := range users {
+ m.Add(sitemap.URL{URL: item.HTMLURL(), LastMod: item.UpdatedUnix.AsTimePtr()})
+ }
+ ctx.Resp.Header().Set("Content-Type", "text/xml")
+ if _, err := m.WriteTo(ctx.Resp); err != nil {
+ log.Error("Failed writing sitemap: %v", err)
+ }
+ return
+ }
+
+ ctx.Data["Keyword"] = opts.Keyword
+ ctx.Data["Total"] = count
+ ctx.Data["Users"] = users
+ ctx.Data["UsersTwoFaStatus"] = user_model.UserList(users).GetTwoFaStatus(ctx)
+ ctx.Data["ShowUserEmail"] = setting.UI.ShowUserEmail
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ for paramKey, paramVal := range opts.ExtraParamStrings {
+ pager.AddParamString(paramKey, paramVal)
+ }
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplName)
+}
+
+// Users renders the explore users page
+func Users(ctx *context.Context) {
+ if setting.Service.Explore.DisableUsersPage {
+ ctx.Redirect(setting.AppSubURL + "/explore/repos")
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("explore")
+ ctx.Data["PageIsExplore"] = true
+ ctx.Data["PageIsExploreUsers"] = true
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ supportedSortOrders := container.SetOf(
+ "newest",
+ "oldest",
+ "alphabetically",
+ "reversealphabetically",
+ )
+ sortOrder := ctx.FormString("sort")
+ if sortOrder == "" {
+ sortOrder = "newest"
+ ctx.SetFormString("sort", sortOrder)
+ }
+
+ RenderUserSearch(ctx, &user_model.SearchUserOptions{
+ Actor: ctx.Doer,
+ Type: user_model.UserTypeIndividual,
+ ListOptions: db.ListOptions{PageSize: setting.UI.ExplorePagingNum},
+ IsActive: optional.Some(true),
+ Visible: []structs.VisibleType{structs.VisibleTypePublic, structs.VisibleTypeLimited, structs.VisibleTypePrivate},
+
+ SupportedSortOrders: supportedSortOrders,
+ }, tplExploreUsers)
+}
diff --git a/routers/web/feed/branch.go b/routers/web/feed/branch.go
new file mode 100644
index 0000000..80ce2ad
--- /dev/null
+++ b/routers/web/feed/branch.go
@@ -0,0 +1,50 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+)
+
+// ShowBranchFeed shows the latest commits of a branch as an RSS / Atom feed
+func ShowBranchFeed(ctx *context.Context, repo *repo.Repository, formatType string) {
+ commits, err := ctx.Repo.Commit.CommitsByRange(0, 10, "")
+ if err != nil {
+ ctx.ServerError("ShowBranchFeed", err)
+ return
+ }
+
+ title := fmt.Sprintf("Latest commits for branch %s", ctx.Repo.BranchName)
+ link := &feeds.Link{Href: repo.HTMLURL() + "/" + ctx.Repo.BranchNameSubURL()}
+
+ feed := &feeds.Feed{
+ Title: title,
+ Link: link,
+ Description: repo.Description,
+ Created: time.Now(),
+ }
+
+ for _, commit := range commits {
+ feed.Items = append(feed.Items, &feeds.Item{
+ Id: commit.ID.String(),
+ Title: strings.TrimSpace(strings.Split(commit.Message(), "\n")[0]),
+ Link: &feeds.Link{Href: repo.HTMLURL() + "/commit/" + commit.ID.String()},
+ Author: &feeds.Author{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Description: commit.Message(),
+ Content: commit.Message(),
+ })
+ }
+
+ writeFeed(ctx, feed, formatType)
+}
diff --git a/routers/web/feed/convert.go b/routers/web/feed/convert.go
new file mode 100644
index 0000000..0f43346
--- /dev/null
+++ b/routers/web/feed/convert.go
@@ -0,0 +1,332 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "fmt"
+ "html"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+ "github.com/jaytaylor/html2text"
+)
+
+func toBranchLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/src/branch/" + util.PathEscapeSegments(act.GetBranch())
+}
+
+func toTagLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/src/tag/" + util.PathEscapeSegments(act.GetTag())
+}
+
+func toIssueLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/issues/" + url.PathEscape(act.GetIssueInfos()[0])
+}
+
+func toPullLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/pulls/" + url.PathEscape(act.GetIssueInfos()[0])
+}
+
+func toSrcLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/src/" + util.PathEscapeSegments(act.GetBranch())
+}
+
+func toReleaseLink(ctx *context.Context, act *activities_model.Action) string {
+ return act.GetRepoAbsoluteLink(ctx) + "/releases/tag/" + util.PathEscapeSegments(act.GetBranch())
+}
+
+// renderMarkdown creates a minimal markdown render context from an action.
+// If rendering fails, the original markdown text is returned
+func renderMarkdown(ctx *context.Context, act *activities_model.Action, content string) template.HTML {
+ markdownCtx := &markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ Base: act.GetRepoLink(ctx),
+ },
+ Type: markdown.MarkupName,
+ Metas: map[string]string{
+ "user": act.GetRepoUserName(ctx),
+ "repo": act.GetRepoName(ctx),
+ },
+ }
+ markdown, err := markdown.RenderString(markdownCtx, content)
+ if err != nil {
+ return templates.SanitizeHTML(content) // old code did so: use SanitizeHTML to render in tmpl
+ }
+ return markdown
+}
+
+// feedActionsToFeedItems converts a list of gitea Actions to feeds Items
+func feedActionsToFeedItems(ctx *context.Context, actions activities_model.ActionList) (items []*feeds.Item, err error) {
+ for _, act := range actions {
+ act.LoadActUser(ctx)
+
+ // TODO: the code seems quite strange (maybe not right)
+ // sometimes it uses text content but sometimes it uses HTML content
+		// it should clearly define which kind of content to use for the feed items: plain text or rich HTML
+ var title, desc string
+ var content template.HTML
+
+ link := &feeds.Link{Href: act.GetCommentHTMLURL(ctx)}
+
+ // title
+ title = act.ActUser.GetDisplayName() + " "
+ var titleExtra template.HTML
+ switch act.OpType {
+ case activities_model.ActionCreateRepo:
+ titleExtra = ctx.Locale.Tr("action.create_repo", act.GetRepoAbsoluteLink(ctx), act.ShortRepoPath(ctx))
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ case activities_model.ActionRenameRepo:
+ titleExtra = ctx.Locale.Tr("action.rename_repo", act.GetContent(), act.GetRepoAbsoluteLink(ctx), act.ShortRepoPath(ctx))
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ case activities_model.ActionCommitRepo:
+ link.Href = toBranchLink(ctx, act)
+ if len(act.Content) != 0 {
+ titleExtra = ctx.Locale.Tr("action.commit_repo", act.GetRepoAbsoluteLink(ctx), link.Href, act.GetBranch(), act.ShortRepoPath(ctx))
+ } else {
+ titleExtra = ctx.Locale.Tr("action.create_branch", act.GetRepoAbsoluteLink(ctx), link.Href, act.GetBranch(), act.ShortRepoPath(ctx))
+ }
+ case activities_model.ActionCreateIssue:
+ link.Href = toIssueLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.create_issue", link.Href, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionCreatePullRequest:
+ link.Href = toPullLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.create_pull_request", link.Href, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionTransferRepo:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.transfer_repo", act.GetContent(), act.GetRepoAbsoluteLink(ctx), act.ShortRepoPath(ctx))
+ case activities_model.ActionPushTag:
+ link.Href = toTagLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.push_tag", act.GetRepoAbsoluteLink(ctx), link.Href, act.GetTag(), act.ShortRepoPath(ctx))
+ case activities_model.ActionCommentIssue:
+ issueLink := toIssueLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = issueLink
+ }
+ titleExtra = ctx.Locale.Tr("action.comment_issue", issueLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionMergePullRequest:
+ pullLink := toPullLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = pullLink
+ }
+ titleExtra = ctx.Locale.Tr("action.merge_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionAutoMergePullRequest:
+ pullLink := toPullLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = pullLink
+ }
+ titleExtra = ctx.Locale.Tr("action.auto_merge_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionCloseIssue:
+ issueLink := toIssueLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = issueLink
+ }
+ titleExtra = ctx.Locale.Tr("action.close_issue", issueLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionReopenIssue:
+ issueLink := toIssueLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = issueLink
+ }
+ titleExtra = ctx.Locale.Tr("action.reopen_issue", issueLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionClosePullRequest:
+ pullLink := toPullLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = pullLink
+ }
+ titleExtra = ctx.Locale.Tr("action.close_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionReopenPullRequest:
+ pullLink := toPullLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = pullLink
+ }
+ titleExtra = ctx.Locale.Tr("action.reopen_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionDeleteTag:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.delete_tag", act.GetRepoAbsoluteLink(ctx), act.GetTag(), act.ShortRepoPath(ctx))
+ case activities_model.ActionDeleteBranch:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.delete_branch", act.GetRepoAbsoluteLink(ctx), html.EscapeString(act.GetBranch()), act.ShortRepoPath(ctx))
+ case activities_model.ActionMirrorSyncPush:
+ srcLink := toSrcLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = srcLink
+ }
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_push", act.GetRepoAbsoluteLink(ctx), srcLink, act.GetBranch(), act.ShortRepoPath(ctx))
+ case activities_model.ActionMirrorSyncCreate:
+ srcLink := toSrcLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = srcLink
+ }
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_create", act.GetRepoAbsoluteLink(ctx), srcLink, act.GetBranch(), act.ShortRepoPath(ctx))
+ case activities_model.ActionMirrorSyncDelete:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.mirror_sync_delete", act.GetRepoAbsoluteLink(ctx), act.GetBranch(), act.ShortRepoPath(ctx))
+ case activities_model.ActionApprovePullRequest:
+ pullLink := toPullLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.approve_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionRejectPullRequest:
+ pullLink := toPullLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.reject_pull_request", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionCommentPull:
+ pullLink := toPullLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.comment_pull", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx))
+ case activities_model.ActionPublishRelease:
+ releaseLink := toReleaseLink(ctx, act)
+ if link.Href == "#" {
+ link.Href = releaseLink
+ }
+ titleExtra = ctx.Locale.Tr("action.publish_release", act.GetRepoAbsoluteLink(ctx), releaseLink, act.ShortRepoPath(ctx), act.Content)
+ case activities_model.ActionPullReviewDismissed:
+ pullLink := toPullLink(ctx, act)
+ titleExtra = ctx.Locale.Tr("action.review_dismissed", pullLink, act.GetIssueInfos()[0], act.ShortRepoPath(ctx), act.GetIssueInfos()[1])
+ case activities_model.ActionStarRepo:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.starred_repo", act.GetRepoAbsoluteLink(ctx), act.GetRepoPath(ctx))
+ case activities_model.ActionWatchRepo:
+ link.Href = act.GetRepoAbsoluteLink(ctx)
+ titleExtra = ctx.Locale.Tr("action.watched_repo", act.GetRepoAbsoluteLink(ctx), act.GetRepoPath(ctx))
+ default:
+ return nil, fmt.Errorf("unknown action type: %v", act.OpType)
+ }
+
+ // description & content
+ {
+ switch act.OpType {
+ case activities_model.ActionCommitRepo, activities_model.ActionMirrorSyncPush:
+ push := templates.ActionContent2Commits(act)
+
+ for _, commit := range push.Commits {
+ if len(desc) != 0 {
+ desc += "\n\n"
+ }
+ desc += fmt.Sprintf("<a href=\"%s\">%s</a>\n%s",
+ html.EscapeString(fmt.Sprintf("%s/commit/%s", act.GetRepoAbsoluteLink(ctx), commit.Sha1)),
+ commit.Sha1,
+ templates.RenderCommitMessage(ctx, commit.Message, nil),
+ )
+ }
+
+ if push.Len > 1 {
+ link = &feeds.Link{Href: fmt.Sprintf("%s/%s", setting.AppSubURL, push.CompareURL)}
+ } else if push.Len == 1 {
+ link = &feeds.Link{Href: fmt.Sprintf("%s/commit/%s", act.GetRepoAbsoluteLink(ctx), push.Commits[0].Sha1)}
+ }
+
+ case activities_model.ActionCreateIssue, activities_model.ActionCreatePullRequest:
+ desc = strings.Join(act.GetIssueInfos(), "#")
+ content = renderMarkdown(ctx, act, act.GetIssueContent(ctx))
+ case activities_model.ActionCommentIssue, activities_model.ActionApprovePullRequest, activities_model.ActionRejectPullRequest, activities_model.ActionCommentPull:
+ desc = act.GetIssueTitle(ctx)
+ comment := act.GetIssueInfos()[1]
+ if len(comment) != 0 {
+ desc += "\n\n" + string(renderMarkdown(ctx, act, comment))
+ }
+ case activities_model.ActionMergePullRequest, activities_model.ActionAutoMergePullRequest:
+ desc = act.GetIssueInfos()[1]
+ case activities_model.ActionCloseIssue, activities_model.ActionReopenIssue, activities_model.ActionClosePullRequest, activities_model.ActionReopenPullRequest:
+ desc = act.GetIssueTitle(ctx)
+ case activities_model.ActionPullReviewDismissed:
+ desc = ctx.Locale.TrString("action.review_dismissed_reason") + "\n\n" + act.GetIssueInfos()[2]
+ }
+ }
+ if len(content) == 0 {
+ content = templates.SanitizeHTML(desc)
+ }
+
+ // It's a common practice for feed generators to use plain text titles.
+ // See https://codeberg.org/forgejo/forgejo/pulls/1595
+ plainTitle, err := html2text.FromString(title+" "+string(titleExtra), html2text.Options{OmitLinks: true})
+ if err != nil {
+ return nil, err
+ }
+
+ items = append(items, &feeds.Item{
+ Title: plainTitle,
+ Link: link,
+ Description: desc,
+ IsPermaLink: "false",
+ Author: &feeds.Author{
+ Name: act.ActUser.GetDisplayName(),
+ Email: act.ActUser.GetEmail(),
+ },
+ Id: fmt.Sprintf("%v: %v", strconv.FormatInt(act.ID, 10), link.Href),
+ Created: act.CreatedUnix.AsTime(),
+ Content: string(content),
+ })
+ }
+ return items, err
+}
+
+// GetFeedType returns whether this is a feed request, the name with any feed suffix stripped, and the feed type.
+func GetFeedType(name string, req *http.Request) (bool, string, string) {
+ if strings.HasSuffix(name, ".rss") ||
+ strings.Contains(req.Header.Get("Accept"), "application/rss+xml") {
+ return true, strings.TrimSuffix(name, ".rss"), "rss"
+ }
+
+ if strings.HasSuffix(name, ".atom") ||
+ strings.Contains(req.Header.Get("Accept"), "application/atom+xml") {
+ return true, strings.TrimSuffix(name, ".atom"), "atom"
+ }
+
+ return false, name, ""
+}
+
+// releasesToFeedItems converts a repository's Releases to feeds Items
+func releasesToFeedItems(ctx *context.Context, releases []*repo_model.Release) (items []*feeds.Item, err error) {
+ for _, rel := range releases {
+ err := rel.LoadAttributes(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ var title string
+ var content template.HTML
+
+ if rel.IsTag {
+ title = rel.TagName
+ } else {
+ title = rel.Title
+ }
+
+ link := &feeds.Link{Href: rel.HTMLURL()}
+ content, err = markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ Base: rel.Repo.Link(),
+ },
+ Metas: rel.Repo.ComposeMetas(ctx),
+ }, rel.Note)
+ if err != nil {
+ return nil, err
+ }
+
+ items = append(items, &feeds.Item{
+ Title: title,
+ Link: link,
+ Created: rel.CreatedUnix.AsTime(),
+ Author: &feeds.Author{
+ Name: rel.Publisher.GetDisplayName(),
+ Email: rel.Publisher.GetEmail(),
+ },
+ Id: fmt.Sprintf("%v: %v", strconv.FormatInt(rel.ID, 10), link.Href),
+ Content: string(content),
+ })
+ }
+
+ return items, err
+}
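
GetFeedType drives both the URL-suffix and the Accept-header forms of feed negotiation. A small usage sketch; the import path matches this tree, and the request values are made up:

package main

import (
	"fmt"
	"net/http"

	"code.gitea.io/gitea/routers/web/feed"
)

func main() {
	req, _ := http.NewRequest(http.MethodGet, "/gitea", nil)
	req.Header.Set("Accept", "application/atom+xml")

	// The ".rss" suffix is checked before the Accept header, so it wins here.
	isFeed, name, feedType := feed.GetFeedType("gitea.rss", req)
	fmt.Println(isFeed, name, feedType) // true gitea rss

	// Without a recognized suffix, the Accept header decides.
	isFeed, name, feedType = feed.GetFeedType("gitea", req)
	fmt.Println(isFeed, name, feedType) // true gitea atom
}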
diff --git a/routers/web/feed/file.go b/routers/web/feed/file.go
new file mode 100644
index 0000000..1ab768f
--- /dev/null
+++ b/routers/web/feed/file.go
@@ -0,0 +1,62 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "fmt"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+)
+
+// ShowFileFeed shows the latest commits touching a file as an RSS / Atom feed
+func ShowFileFeed(ctx *context.Context, repo *repo.Repository, formatType string) {
+ fileName := ctx.Repo.TreePath
+ if len(fileName) == 0 {
+ return
+ }
+ commits, err := ctx.Repo.GitRepo.CommitsByFileAndRange(
+ git.CommitsByFileAndRangeOptions{
+ Revision: ctx.Repo.RefName,
+ File: fileName,
+ Page: 1,
+ })
+ if err != nil {
+ ctx.ServerError("ShowBranchFeed", err)
+ return
+ }
+
+ title := fmt.Sprintf("Latest commits for file %s", ctx.Repo.TreePath)
+
+ link := &feeds.Link{Href: repo.HTMLURL() + "/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)}
+
+ feed := &feeds.Feed{
+ Title: title,
+ Link: link,
+ Description: repo.Description,
+ Created: time.Now(),
+ }
+
+ for _, commit := range commits {
+ feed.Items = append(feed.Items, &feeds.Item{
+ Id: commit.ID.String(),
+ Title: strings.TrimSpace(strings.Split(commit.Message(), "\n")[0]),
+ Link: &feeds.Link{Href: repo.HTMLURL() + "/commit/" + commit.ID.String()},
+ Author: &feeds.Author{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Description: commit.Message(),
+ Content: commit.Message(),
+ })
+ }
+
+ writeFeed(ctx, feed, formatType)
+}
diff --git a/routers/web/feed/profile.go b/routers/web/feed/profile.go
new file mode 100644
index 0000000..08cbcd9
--- /dev/null
+++ b/routers/web/feed/profile.go
@@ -0,0 +1,87 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+)
+
+// ShowUserFeedRSS shows user activity as an RSS feed
+func ShowUserFeedRSS(ctx *context.Context) {
+ showUserFeed(ctx, "rss")
+}
+
+// ShowUserFeedAtom shows user activity as an Atom feed
+func ShowUserFeedAtom(ctx *context.Context) {
+ showUserFeed(ctx, "atom")
+}
+
+// showUserFeed shows user activity as an RSS / Atom feed
+func showUserFeed(ctx *context.Context, formatType string) {
+ includePrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == ctx.ContextUser.ID)
+
+ actions, _, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
+ RequestedUser: ctx.ContextUser,
+ Actor: ctx.Doer,
+ IncludePrivate: includePrivate,
+ OnlyPerformedBy: !ctx.ContextUser.IsOrganization(),
+ IncludeDeleted: false,
+ Date: ctx.FormString("date"),
+ })
+ if err != nil {
+ ctx.ServerError("GetFeeds", err)
+ return
+ }
+
+ ctxUserDescription, err := markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ Links: markup.Links{
+ Base: ctx.ContextUser.HTMLURL(),
+ },
+ Metas: map[string]string{
+ "user": ctx.ContextUser.GetDisplayName(),
+ },
+ }, ctx.ContextUser.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ feed := &feeds.Feed{
+ Title: ctx.Locale.TrString("home.feed_of", ctx.ContextUser.DisplayName()),
+ Link: &feeds.Link{Href: ctx.ContextUser.HTMLURL()},
+ Description: string(ctxUserDescription),
+ Created: time.Now(),
+ }
+
+ feed.Items, err = feedActionsToFeedItems(ctx, actions)
+ if err != nil {
+ ctx.ServerError("convert feed", err)
+ return
+ }
+
+ writeFeed(ctx, feed, formatType)
+}
+
+// writeFeed writes a feeds.Feed as Atom or RSS to ctx.Resp
+func writeFeed(ctx *context.Context, feed *feeds.Feed, formatType string) {
+ if formatType == "atom" {
+ ctx.Resp.Header().Set("Content-Type", "application/atom+xml;charset=utf-8")
+ if err := feed.WriteAtom(ctx.Resp); err != nil {
+ ctx.ServerError("Render Atom failed", err)
+ }
+ } else {
+ ctx.Resp.Header().Set("Content-Type", "application/rss+xml;charset=utf-8")
+ if err := feed.WriteRss(ctx.Resp); err != nil {
+ ctx.ServerError("Render RSS failed", err)
+ }
+ }
+}
diff --git a/routers/web/feed/release.go b/routers/web/feed/release.go
new file mode 100644
index 0000000..fb6e3ad
--- /dev/null
+++ b/routers/web/feed/release.go
@@ -0,0 +1,52 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+)
+
+// ShowReleaseFeed shows tags and/or releases on the repo as an RSS / Atom feed
+func ShowReleaseFeed(ctx *context.Context, repo *repo_model.Repository, isReleasesOnly bool, formatType string) {
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeTags: !isReleasesOnly,
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetReleasesByRepoID", err)
+ return
+ }
+
+ var title string
+ var link *feeds.Link
+
+ if isReleasesOnly {
+ title = ctx.Locale.TrString("repo.release.releases_for", repo.FullName())
+ link = &feeds.Link{Href: repo.HTMLURL() + "/release"}
+ } else {
+ title = ctx.Locale.TrString("repo.release.tags_for", repo.FullName())
+ link = &feeds.Link{Href: repo.HTMLURL() + "/tags"}
+ }
+
+ feed := &feeds.Feed{
+ Title: title,
+ Link: link,
+ Description: repo.Description,
+ Created: time.Now(),
+ }
+
+ feed.Items, err = releasesToFeedItems(ctx, releases)
+ if err != nil {
+ ctx.ServerError("releasesToFeedItems", err)
+ return
+ }
+
+ writeFeed(ctx, feed, formatType)
+}
diff --git a/routers/web/feed/render.go b/routers/web/feed/render.go
new file mode 100644
index 0000000..dc99fb4
--- /dev/null
+++ b/routers/web/feed/render.go
@@ -0,0 +1,19 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "code.gitea.io/gitea/services/context"
+)
+
+// RenderBranchFeed returns a handler rendering the commit feed for a branch or a file
+func RenderBranchFeed(feedType string) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ if ctx.Repo.TreePath == "" {
+ ShowBranchFeed(ctx, ctx.Repo.Repository, feedType)
+ } else {
+ ShowFileFeed(ctx, ctx.Repo.Repository, feedType)
+ }
+ }
+}
diff --git a/routers/web/feed/repo.go b/routers/web/feed/repo.go
new file mode 100644
index 0000000..a0033c7
--- /dev/null
+++ b/routers/web/feed/repo.go
@@ -0,0 +1,44 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package feed
+
+import (
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/gorilla/feeds"
+)
+
+// ShowRepoFeed shows user activity on the repo as RSS / Atom feed
+func ShowRepoFeed(ctx *context.Context, repo *repo_model.Repository, formatType string) {
+ actions, _, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
+ OnlyPerformedByActor: true,
+ RequestedRepo: repo,
+ Actor: ctx.Doer,
+ IncludePrivate: true,
+ Date: ctx.FormString("date"),
+ })
+ if err != nil {
+ ctx.ServerError("GetFeeds", err)
+ return
+ }
+
+ feed := &feeds.Feed{
+ Title: ctx.Locale.TrString("home.feed_of", repo.FullName()),
+ Link: &feeds.Link{Href: repo.HTMLURL()},
+ Description: repo.Description,
+ Created: time.Now(),
+ }
+
+ feed.Items, err = feedActionsToFeedItems(ctx, actions)
+ if err != nil {
+ ctx.ServerError("convert feed", err)
+ return
+ }
+
+ writeFeed(ctx, feed, formatType)
+}
diff --git a/routers/web/githttp.go b/routers/web/githttp.go
new file mode 100644
index 0000000..5f1dedc
--- /dev/null
+++ b/routers/web/githttp.go
@@ -0,0 +1,42 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+)
+
+func requireSignIn(ctx *context.Context) {
+ if !setting.Service.RequireSignInView {
+ return
+ }
+
+ // rely on the results of Contexter
+ if !ctx.IsSigned {
+		// TODO: support HTTP digest auth, which would be an Authorization header with a digest
+ ctx.Resp.Header().Set("WWW-Authenticate", `Basic realm="Gitea"`)
+ ctx.Error(http.StatusUnauthorized)
+ }
+}
+
+func gitHTTPRouters(m *web.Route) {
+ m.Group("", func() {
+ m.Methods("POST,OPTIONS", "/git-upload-pack", repo.ServiceUploadPack)
+ m.Methods("POST,OPTIONS", "/git-receive-pack", repo.ServiceReceivePack)
+ m.Methods("GET,OPTIONS", "/info/refs", repo.GetInfoRefs)
+ m.Methods("GET,OPTIONS", "/HEAD", repo.GetTextFile("HEAD"))
+ m.Methods("GET,OPTIONS", "/objects/info/alternates", repo.GetTextFile("objects/info/alternates"))
+ m.Methods("GET,OPTIONS", "/objects/info/http-alternates", repo.GetTextFile("objects/info/http-alternates"))
+ m.Methods("GET,OPTIONS", "/objects/info/packs", repo.GetInfoPacks)
+ m.Methods("GET,OPTIONS", "/objects/info/{file:[^/]*}", repo.GetTextFile(""))
+ m.Methods("GET,OPTIONS", "/objects/{head:[0-9a-f]{2}}/{hash:[0-9a-f]{38,62}}", repo.GetLooseObject)
+ m.Methods("GET,OPTIONS", "/objects/pack/pack-{file:[0-9a-f]{40,64}}.pack", repo.GetPackFile)
+ m.Methods("GET,OPTIONS", "/objects/pack/pack-{file:[0-9a-f]{40,64}}.idx", repo.GetIdxFile)
+ }, ignSignInAndCsrf, requireSignIn, repo.HTTPGitEnabledHandler, repo.CorsHandler(), context.UserAssignmentWeb())
+}
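
These routes implement the Git smart HTTP protocol; the first request a clone makes is GET .../info/refs?service=git-upload-pack. A probe sketch, assuming the group is mounted under /{owner}/{repo}.git as usual (host and repository are placeholders):

package main

import (
	"fmt"
	"net/http"
)

func main() {
	url := "https://forgejo.example.com/owner/repo.git/info/refs?service=git-upload-pack"
	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// A smart HTTP server advertises its refs with this content type.
	fmt.Println(resp.Status, resp.Header.Get("Content-Type"))
	// e.g. "200 OK application/x-git-upload-pack-advertisement"
}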
diff --git a/routers/web/goget.go b/routers/web/goget.go
new file mode 100644
index 0000000..8d5612e
--- /dev/null
+++ b/routers/web/goget.go
@@ -0,0 +1,93 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "fmt"
+ "html"
+ "net/http"
+ "net/url"
+ "path"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+func goGet(ctx *context.Context) {
+ if ctx.Req.Method != "GET" || len(ctx.Req.URL.RawQuery) < 8 || ctx.FormString("go-get") != "1" {
+ return
+ }
+
+ parts := strings.SplitN(ctx.Req.URL.EscapedPath(), "/", 4)
+
+ if len(parts) < 3 {
+ return
+ }
+
+ ownerName := parts[1]
+ repoName := parts[2]
+
+	// Quickly respond with the appropriate go-get meta with status 200,
+	// regardless of whether the user has access to the repository
+	// or whether the repository exists at all.
+	// This is in particular a workaround for the "go get" command, which does
+	// not respect the .netrc file.
+
+ trimmedRepoName := strings.TrimSuffix(repoName, ".git")
+
+ if ownerName == "" || trimmedRepoName == "" {
+ _, _ = ctx.Write([]byte(`<!doctype html>
+<html>
+ <body>
+ invalid import path
+ </body>
+</html>
+`))
+ ctx.Status(http.StatusBadRequest)
+ return
+ }
+ branchName := setting.Repository.DefaultBranch
+
+ repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, ownerName, repoName)
+ if err == nil && len(repo.DefaultBranch) > 0 {
+ branchName = repo.DefaultBranch
+ }
+ prefix := setting.AppURL + path.Join(url.PathEscape(ownerName), url.PathEscape(repoName), "src", "branch", util.PathEscapeSegments(branchName))
+
+ appURL, _ := url.Parse(setting.AppURL)
+
+ insecure := ""
+ if appURL.Scheme == string(setting.HTTP) {
+ insecure = "--insecure "
+ }
+
+ goGetImport := context.ComposeGoGetImport(ownerName, trimmedRepoName)
+
+ var cloneURL string
+ if setting.Repository.GoGetCloneURLProtocol == "ssh" {
+ cloneURL = repo_model.ComposeSSHCloneURL(ownerName, repoName)
+ } else {
+ cloneURL = repo_model.ComposeHTTPSCloneURL(ownerName, repoName)
+ }
+ goImportContent := fmt.Sprintf("%s git %s", goGetImport, cloneURL /*CloneLink*/)
+ goSourceContent := fmt.Sprintf("%s _ %s %s", goGetImport, prefix+"{/dir}" /*GoDocDirectory*/, prefix+"{/dir}/{file}#L{line}" /*GoDocFile*/)
+ goGetCli := fmt.Sprintf("go get %s%s", insecure, goGetImport)
+
+ res := fmt.Sprintf(`<!doctype html>
+<html>
+ <head>
+ <meta name="go-import" content="%s">
+ <meta name="go-source" content="%s">
+ </head>
+ <body>
+ %s
+ </body>
+</html>`, html.EscapeString(goImportContent), html.EscapeString(goSourceContent), html.EscapeString(goGetCli))
+
+ ctx.RespHeader().Set("Content-Type", "text/html")
+ _, _ = ctx.Write([]byte(res))
+}
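
For a concrete idea of the output, assuming AppURL https://gitea.example.com/, a default branch named main, and the HTTPS clone protocol, the generated meta tags would look roughly like this (illustrative only; the exact values depend on configuration):

<meta name="go-import" content="gitea.example.com/owner/repo git https://gitea.example.com/owner/repo.git">
<meta name="go-source" content="gitea.example.com/owner/repo _ https://gitea.example.com/owner/repo/src/branch/main{/dir} https://gitea.example.com/owner/repo/src/branch/main{/dir}/{file}#L{line}">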
diff --git a/routers/web/healthcheck/check.go b/routers/web/healthcheck/check.go
new file mode 100644
index 0000000..83dfe62
--- /dev/null
+++ b/routers/web/healthcheck/check.go
@@ -0,0 +1,140 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package healthcheck
+
+import (
+ "context"
+ "net/http"
+ "os"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+type status string
+
+const (
+ // Pass healthy (acceptable aliases: "ok" to support Node's Terminus and "up" for Java's SpringBoot)
+ // fail unhealthy (acceptable aliases: "error" to support Node's Terminus and "down" for Java's SpringBoot), and
+ // warn healthy, with some concerns.
+ //
+ // ref https://datatracker.ietf.org/doc/html/draft-inadarei-api-health-check#section-3.1
+ // status: (required) indicates whether the service status is acceptable
+ // or not. API publishers SHOULD use following values for the field:
+ // The value of the status field is case-insensitive and is tightly
+ // related with the HTTP Response code returned by the health endpoint.
+ // For "pass" status, HTTP Response code in the 2xx-3xx range MUST be
+ // used. For "fail" status, HTTP Response code in the 4xx-5xx range
+ // MUST be used. In case of the "warn" status, endpoints MUST return
+ // HTTP status in the 2xx-3xx range, and additional information SHOULD
+ // be provided, utilizing optional fields of the Response.
+ Pass status = "pass"
+ Fail status = "fail"
+ warn status = "warn"
+)
+
+func (s status) ToHTTPStatus() int {
+ if s == Pass || s == warn {
+ return http.StatusOK
+ }
+ return http.StatusFailedDependency
+}
+
+type checks map[string][]componentStatus
+
+// Response is the data returned by the health endpoint, which will be marshaled to JSON format
+type Response struct {
+ Status status `json:"status"`
+ Description string `json:"description"` // a human-friendly description of the service
+ Checks checks `json:"checks,omitempty"` // The Checks Object, should be omitted on installation route
+}
+
+// componentStatus presents one status of a single check object
+// an object that provides detailed health statuses of additional downstream systems and endpoints
+// which can affect the overall health of the main API.
+type componentStatus struct {
+ Status status `json:"status"`
+ Time string `json:"time"` // the date-time, in ISO8601 format
+ Output string `json:"output,omitempty"` // this field SHOULD be omitted for "pass" state.
+}
+
+// Check is the health check API handler
+func Check(w http.ResponseWriter, r *http.Request) {
+ rsp := Response{
+ Status: Pass,
+ Description: setting.AppName,
+ Checks: make(checks),
+ }
+
+ statuses := make([]status, 0)
+ if setting.InstallLock {
+ statuses = append(statuses, checkDatabase(r.Context(), rsp.Checks))
+ statuses = append(statuses, checkCache(rsp.Checks))
+ }
+ for _, s := range statuses {
+ if s != Pass {
+ rsp.Status = Fail
+ break
+ }
+ }
+
+ data, _ := json.MarshalIndent(rsp, "", " ")
+ w.Header().Set("Content-Type", "application/json")
+ w.Header().Set("Cache-Control", "no-store")
+ w.WriteHeader(rsp.Status.ToHTTPStatus())
+ _, _ = w.Write(data)
+}
+
+// checkDatabase checks the gitea database status
+func checkDatabase(ctx context.Context, checks checks) status {
+ st := componentStatus{}
+ if err := db.GetEngine(ctx).Ping(); err != nil {
+ st.Status = Fail
+ st.Time = getCheckTime()
+ log.Error("database ping failed with error: %v", err)
+ } else {
+ st.Status = Pass
+ st.Time = getCheckTime()
+ }
+
+ if setting.Database.Type.IsSQLite3() && st.Status == Pass {
+ if !setting.EnableSQLite3 {
+ st.Status = Fail
+ st.Time = getCheckTime()
+ log.Error("SQLite3 health check failed with error: %v", "this Forgejo binary is built without SQLite3 enabled")
+ } else {
+ if _, err := os.Stat(setting.Database.Path); err != nil {
+ st.Status = Fail
+ st.Time = getCheckTime()
+ log.Error("SQLite3 file exists check failed with error: %v", err)
+ }
+ }
+ }
+
+ checks["database:ping"] = []componentStatus{st}
+ return st.Status
+}
+
+// checkCache checks the gitea cache status
+func checkCache(checks checks) status {
+ st := componentStatus{}
+ if err := cache.GetCache().Ping(); err != nil {
+ st.Status = Fail
+ st.Time = getCheckTime()
+ log.Error("cache ping failed with error: %v", err)
+ } else {
+ st.Status = Pass
+ st.Time = getCheckTime()
+ }
+ checks["cache:ping"] = []componentStatus{st}
+ return st.Status
+}
+
+func getCheckTime() string {
+ return time.Now().UTC().Format(time.RFC3339)
+}
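
A healthy instance answers the health endpoint with a JSON body shaped by the Response struct above, roughly like the following (values are illustrative; the "output" field only appears for non-passing checks):

{
  "status": "pass",
  "description": "Forgejo",
  "checks": {
    "database:ping": [{"status": "pass", "time": "2024-10-18T18:33:49Z"}],
    "cache:ping": [{"status": "pass", "time": "2024-10-18T18:33:49Z"}]
  }
}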
diff --git a/routers/web/home.go b/routers/web/home.go
new file mode 100644
index 0000000..d4be093
--- /dev/null
+++ b/routers/web/home.go
@@ -0,0 +1,117 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "net/http"
+ "strconv"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/sitemap"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/routers/web/auth"
+ "code.gitea.io/gitea/routers/web/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // tplHome home page template
+ tplHome base.TplName = "home"
+)
+
+// Home renders the home page
+func Home(ctx *context.Context) {
+ if ctx.IsSigned {
+ if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm {
+ ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
+ ctx.HTML(http.StatusOK, auth.TplActivate)
+ } else if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin {
+ log.Info("Failed authentication attempt for %s from %s", ctx.Doer.Name, ctx.RemoteAddr())
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ } else if ctx.Doer.MustChangePassword {
+ ctx.Data["Title"] = ctx.Tr("auth.must_change_password")
+ ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password"
+ middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
+ ctx.Redirect(setting.AppSubURL + "/user/settings/change_password")
+ } else {
+ user.Dashboard(ctx)
+ }
+ return
+		// Check the landing page for non-logged-in users.
+ } else if setting.LandingPageURL != setting.LandingPageHome {
+ ctx.Redirect(setting.AppSubURL + string(setting.LandingPageURL))
+ return
+ }
+
+ // Check auto-login.
+ if ctx.GetSiteCookie(setting.CookieRememberName) != "" {
+ ctx.Redirect(setting.AppSubURL + "/user/login")
+ return
+ }
+
+ ctx.Data["PageIsHome"] = true
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.HTML(http.StatusOK, tplHome)
+}
+
+// HomeSitemap renders the main sitemap
+func HomeSitemap(ctx *context.Context) {
+ m := sitemap.NewSitemapIndex()
+ if !setting.Service.Explore.DisableUsersPage {
+ _, cnt, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
+ Type: user_model.UserTypeIndividual,
+ ListOptions: db.ListOptions{PageSize: 1},
+ IsActive: optional.Some(true),
+ Visible: []structs.VisibleType{structs.VisibleTypePublic},
+ })
+ if err != nil {
+ ctx.ServerError("SearchUsers", err)
+ return
+ }
+ count := int(cnt)
+ idx := 1
+ for i := 0; i < count; i += setting.UI.SitemapPagingNum {
+ m.Add(sitemap.URL{URL: setting.AppURL + "explore/users/sitemap-" + strconv.Itoa(idx) + ".xml"})
+ idx++
+ }
+ }
+
+ _, cnt, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: 1,
+ },
+ Actor: ctx.Doer,
+ AllPublic: true,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+ count := int(cnt)
+ idx := 1
+ for i := 0; i < count; i += setting.UI.SitemapPagingNum {
+ m.Add(sitemap.URL{URL: setting.AppURL + "explore/repos/sitemap-" + strconv.Itoa(idx) + ".xml"})
+ idx++
+ }
+
+ ctx.Resp.Header().Set("Content-Type", "text/xml")
+ if _, err := m.WriteTo(ctx.Resp); err != nil {
+ log.Error("Failed writing sitemap: %v", err)
+ }
+}
+
+// NotFound renders the 404 page
+func NotFound(ctx *context.Context) {
+ ctx.Data["Title"] = "Page Not Found"
+ ctx.NotFound("home.NotFound", nil)
+}
diff --git a/routers/web/metrics.go b/routers/web/metrics.go
new file mode 100644
index 0000000..46c13f0
--- /dev/null
+++ b/routers/web/metrics.go
@@ -0,0 +1,33 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "crypto/subtle"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/prometheus/client_golang/prometheus/promhttp"
+)
+
+// Metrics validates the auth token and renders the Prometheus metrics
+func Metrics(resp http.ResponseWriter, req *http.Request) {
+ if setting.Metrics.Token == "" {
+ promhttp.Handler().ServeHTTP(resp, req)
+ return
+ }
+ header := req.Header.Get("Authorization")
+ if header == "" {
+ http.Error(resp, "", http.StatusUnauthorized)
+ return
+ }
+ got := []byte(header)
+ want := []byte("Bearer " + setting.Metrics.Token)
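+	// Compare in constant time so the token cannot be recovered through timing differences.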
+ if subtle.ConstantTimeCompare(got, want) != 1 {
+ http.Error(resp, "", http.StatusUnauthorized)
+ return
+ }
+ promhttp.Handler().ServeHTTP(resp, req)
+}
diff --git a/routers/web/misc/markup.go b/routers/web/misc/markup.go
new file mode 100644
index 0000000..2dbbd6f
--- /dev/null
+++ b/routers/web/misc/markup.go
@@ -0,0 +1,18 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package misc
+
+import (
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+)
+
+// Markup renders a markup document to HTML
+func Markup(ctx *context.Context) {
+ form := web.GetForm(ctx).(*api.MarkupOption)
+ common.RenderMarkup(ctx.Base, ctx.Repo, form.Mode, form.Text, form.Context, form.FilePath, form.Wiki)
+}
diff --git a/routers/web/misc/misc.go b/routers/web/misc/misc.go
new file mode 100644
index 0000000..54c9376
--- /dev/null
+++ b/routers/web/misc/misc.go
@@ -0,0 +1,49 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package misc
+
+import (
+ "net/http"
+ "path"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+)
+
+func SSHInfo(rw http.ResponseWriter, req *http.Request) {
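+	// The endpoint is only useful when the installed git supports the proc-receive hook.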
+ if !git.SupportProcReceive {
+ rw.WriteHeader(http.StatusNotFound)
+ return
+ }
+ rw.Header().Set("content-type", "text/json;charset=UTF-8")
+ _, err := rw.Write([]byte(`{"type":"gitea","version":1}`))
+ if err != nil {
+		// The response body has already started, so a 500 status can no longer be sent; just log the failure.
+		log.Error("failed to write result: %v", err)
+		return
+	}
+}
+
+func DummyOK(w http.ResponseWriter, req *http.Request) {
+ w.WriteHeader(http.StatusOK)
+}
+
+func RobotsTxt(w http.ResponseWriter, req *http.Request) {
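+	// Prefer the custom "public/robots.txt" and fall back to the legacy "robots.txt" in the custom path.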
+ robotsTxt := util.FilePathJoinAbs(setting.CustomPath, "public/robots.txt")
+ if ok, _ := util.IsExist(robotsTxt); !ok {
+ robotsTxt = util.FilePathJoinAbs(setting.CustomPath, "robots.txt") // the legacy "robots.txt"
+ }
+ httpcache.SetCacheControlInHeader(w.Header(), setting.StaticCacheTime)
+ http.ServeFile(w, req, robotsTxt)
+}
+
+func StaticRedirect(target string) func(w http.ResponseWriter, req *http.Request) {
+ return func(w http.ResponseWriter, req *http.Request) {
+ http.Redirect(w, req, path.Join(setting.StaticURLPrefix, target), http.StatusMovedPermanently)
+ }
+}
diff --git a/routers/web/misc/swagger-forgejo.go b/routers/web/misc/swagger-forgejo.go
new file mode 100644
index 0000000..e3aff02
--- /dev/null
+++ b/routers/web/misc/swagger-forgejo.go
@@ -0,0 +1,19 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package misc
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+)
+
+// tplForgejoSwagger Forgejo swagger page template
+const tplForgejoSwagger base.TplName = "swagger/forgejo-ui"
+
+func SwaggerForgejo(ctx *context.Context) {
+ ctx.Data["APIVersion"] = "v1"
+ ctx.HTML(http.StatusOK, tplForgejoSwagger)
+}
diff --git a/routers/web/misc/swagger.go b/routers/web/misc/swagger.go
new file mode 100644
index 0000000..5fddfa8
--- /dev/null
+++ b/routers/web/misc/swagger.go
@@ -0,0 +1,20 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package misc
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+)
+
+// tplSwagger swagger page template
+const tplSwagger base.TplName = "swagger/ui"
+
+// Swagger renders the swagger-ui page with the v1 JSON
+func Swagger(ctx *context.Context) {
+ ctx.Data["APIJSONVersion"] = "v1"
+ ctx.HTML(http.StatusOK, tplSwagger)
+}
diff --git a/routers/web/nodeinfo.go b/routers/web/nodeinfo.go
new file mode 100644
index 0000000..f1cc7bf
--- /dev/null
+++ b/routers/web/nodeinfo.go
@@ -0,0 +1,32 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+type nodeInfoLinks struct {
+ Links []nodeInfoLink `json:"links"`
+}
+
+type nodeInfoLink struct {
+ Href string `json:"href"`
+ Rel string `json:"rel"`
+}
+
+// NodeInfoLinks returns links to the node info endpoint
+func NodeInfoLinks(ctx *context.Context) {
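+	// Advertise where the NodeInfo 2.1 document can be fetched.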
+ nodeinfolinks := &nodeInfoLinks{
+ Links: []nodeInfoLink{{
+ fmt.Sprintf("%sapi/v1/nodeinfo", setting.AppURL),
+ "http://nodeinfo.diaspora.software/ns/schema/2.1",
+ }},
+ }
+ ctx.JSON(http.StatusOK, nodeinfolinks)
+}
diff --git a/routers/web/org/home.go b/routers/web/org/home.go
new file mode 100644
index 0000000..92793d9
--- /dev/null
+++ b/routers/web/org/home.go
@@ -0,0 +1,189 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "fmt"
+ "net/http"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplOrgHome base.TplName = "org/home"
+)
+
+// Home renders the organization home page
+func Home(ctx *context.Context) {
+ uname := ctx.Params(":username")
+
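+	// The ".keys" and ".gpg" suffixes are served by user endpoints only; they are not valid organization pages.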
+ if strings.HasSuffix(uname, ".keys") || strings.HasSuffix(uname, ".gpg") {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ctx.SetParams(":org", uname)
+ context.HandleOrgAssignment(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ org := ctx.Org.Organization
+
+ ctx.Data["PageIsUserProfile"] = true
+ ctx.Data["Title"] = org.DisplayName()
+
+ var orderBy db.SearchOrderBy
+ sortOrder := ctx.FormString("sort")
+ if _, ok := repo_model.OrderByFlatMap[sortOrder]; !ok {
+ sortOrder = setting.UI.ExploreDefaultSort // TODO: add new default sort order for org home?
+ }
+ ctx.Data["SortType"] = sortOrder
+ orderBy = repo_model.OrderByFlatMap[sortOrder]
+
+ keyword := ctx.FormTrim("q")
+ ctx.Data["Keyword"] = keyword
+
+ language := ctx.FormTrim("language")
+ ctx.Data["Language"] = language
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ archived := ctx.FormOptionalBool("archived")
+ ctx.Data["IsArchived"] = archived
+
+ fork := ctx.FormOptionalBool("fork")
+ ctx.Data["IsFork"] = fork
+
+ mirror := ctx.FormOptionalBool("mirror")
+ ctx.Data["IsMirror"] = mirror
+
+ template := ctx.FormOptionalBool("template")
+ ctx.Data["IsTemplate"] = template
+
+ private := ctx.FormOptionalBool("private")
+ ctx.Data["IsPrivate"] = private
+
+	repos, count, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.User.RepoPagingNum,
+ Page: page,
+ },
+ Keyword: keyword,
+ OwnerID: org.ID,
+ OrderBy: orderBy,
+ Private: ctx.IsSigned,
+ Actor: ctx.Doer,
+ Language: language,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+
+ opts := &organization.FindOrgMembersOpts{
+ OrgID: org.ID,
+ PublicOnly: ctx.Org.PublicMemberOnly,
+ ListOptions: db.ListOptions{Page: 1, PageSize: 25},
+ }
+ members, _, err := organization.FindOrgMembers(ctx, opts)
+ if err != nil {
+ ctx.ServerError("FindOrgMembers", err)
+ return
+ }
+
+ ctx.Data["Repos"] = repos
+ ctx.Data["Total"] = count
+ ctx.Data["Members"] = members
+ ctx.Data["Teams"] = ctx.Org.Teams
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+ ctx.Data["PageIsViewRepositories"] = true
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ pager := context.NewPagination(int(count), setting.UI.User.RepoPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParamString("language", language)
+ if archived.Has() {
+ pager.AddParamString("archived", fmt.Sprint(archived.Value()))
+ }
+ if fork.Has() {
+ pager.AddParamString("fork", fmt.Sprint(fork.Value()))
+ }
+ if mirror.Has() {
+ pager.AddParamString("mirror", fmt.Sprint(mirror.Value()))
+ }
+ if template.Has() {
+ pager.AddParamString("template", fmt.Sprint(template.Value()))
+ }
+ if private.Has() {
+ pager.AddParamString("private", fmt.Sprint(private.Value()))
+ }
+ ctx.Data["Page"] = pager
+
+ ctx.Data["ShowMemberAndTeamTab"] = ctx.Org.IsMember || len(members) > 0
+
+ profileDbRepo, profileGitRepo, profileReadmeBlob, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
+ defer profileClose()
+ prepareOrgProfileReadme(ctx, profileGitRepo, profileDbRepo, profileReadmeBlob)
+
+ ctx.HTML(http.StatusOK, tplOrgHome)
+}
+
+func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repository, profileDbRepo *repo_model.Repository, profileReadme *git.Blob) {
+ if profileGitRepo == nil || profileReadme == nil {
+ return
+ }
+
+ if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("failed to GetBlobContent: %v", err)
+ } else {
+ if profileContent, err := markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ GitRepo: profileGitRepo,
+ Links: markup.Links{
+				// Pass the repo link to the markdown renderer so media elements get full URLs.
+				// The profile README of the default branch is rendered.
+ Base: profileDbRepo.Link(),
+ BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
+ },
+ Metas: map[string]string{"mode": "document"},
+ }, bytes); err != nil {
+ log.Error("failed to RenderString: %v", err)
+ } else {
+ ctx.Data["ProfileReadme"] = profileContent
+ }
+ }
+}
diff --git a/routers/web/org/main_test.go b/routers/web/org/main_test.go
new file mode 100644
index 0000000..92237d6
--- /dev/null
+++ b/routers/web/org/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/org/members.go b/routers/web/org/members.go
new file mode 100644
index 0000000..9a3d60e
--- /dev/null
+++ b/routers/web/org/members.go
@@ -0,0 +1,144 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // tplMembers template for organization members page
+ tplMembers base.TplName = "org/member/members"
+)
+
+// Members renders the organization members page
+func Members(ctx *context.Context) {
+ org := ctx.Org.Organization
+ ctx.Data["Title"] = org.FullName
+ ctx.Data["PageIsOrgMembers"] = true
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ opts := &organization.FindOrgMembersOpts{
+ OrgID: org.ID,
+ PublicOnly: true,
+ }
+
+ if ctx.Doer != nil {
+ isMember, err := ctx.Org.Organization.IsOrgMember(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "IsOrgMember")
+ return
+ }
+ opts.PublicOnly = !isMember && !ctx.Doer.IsAdmin
+ }
+ ctx.Data["PublicOnly"] = opts.PublicOnly
+
+ total, err := organization.CountOrgMembers(ctx, opts)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "CountOrgMembers")
+ return
+ }
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ pager := context.NewPagination(int(total), setting.UI.MembersPagingNum, page, 5)
+ opts.ListOptions.Page = page
+ opts.ListOptions.PageSize = setting.UI.MembersPagingNum
+ members, membersIsPublic, err := organization.FindOrgMembers(ctx, opts)
+ if err != nil {
+ ctx.ServerError("GetMembers", err)
+ return
+ }
+ ctx.Data["Page"] = pager
+ ctx.Data["Members"] = members
+ ctx.Data["MembersIsPublicMember"] = membersIsPublic
+ ctx.Data["MembersIsUserOrgOwner"] = organization.IsUserOrgOwner(ctx, members, org.ID)
+ ctx.Data["MembersTwoFaStatus"] = members.GetTwoFaStatus(ctx)
+
+ ctx.HTML(http.StatusOK, tplMembers)
+}
+
+// MembersAction handles an action performed on an organization member
+func MembersAction(ctx *context.Context) {
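+	// The action comes from the URL: members may change their own visibility, owners may change anyone's; removing a member requires ownership.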
+ uid := ctx.FormInt64("uid")
+ if uid == 0 {
+ ctx.Redirect(ctx.Org.OrgLink + "/members")
+ return
+ }
+
+ org := ctx.Org.Organization
+ var err error
+ switch ctx.Params(":action") {
+ case "private":
+ if ctx.Doer.ID != uid && !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ err = organization.ChangeOrgUserStatus(ctx, org.ID, uid, false)
+ case "public":
+ if ctx.Doer.ID != uid && !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ err = organization.ChangeOrgUserStatus(ctx, org.ID, uid, true)
+ case "remove":
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ err = models.RemoveOrgUser(ctx, org.ID, uid)
+ if organization.IsErrLastOrgOwner(err) {
+ ctx.Flash.Error(ctx.Tr("form.last_org_owner"))
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/members")
+ return
+ }
+ case "leave":
+ err = models.RemoveOrgUser(ctx, org.ID, ctx.Doer.ID)
+ if err == nil {
+ ctx.Flash.Success(ctx.Tr("form.organization_leave_success", org.DisplayName()))
+ ctx.JSON(http.StatusOK, map[string]any{
+			"redirect": "", // keep the user on the current page, in case they want to perform other operations.
+ })
+ } else if organization.IsErrLastOrgOwner(err) {
+ ctx.Flash.Error(ctx.Tr("form.last_org_owner"))
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/members")
+ } else {
+ log.Error("RemoveOrgUser(%d,%d): %v", org.ID, ctx.Doer.ID, err)
+ }
+ return
+ }
+
+ if err != nil {
+ log.Error("Action(%s): %v", ctx.Params(":action"), err)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": false,
+ "err": err.Error(),
+ })
+ return
+ }
+
+ redirect := ctx.Org.OrgLink + "/members"
+ if ctx.Params(":action") == "leave" {
+ redirect = setting.AppSubURL + "/"
+ }
+
+ ctx.JSONRedirect(redirect)
+}
diff --git a/routers/web/org/org.go b/routers/web/org/org.go
new file mode 100644
index 0000000..dd3aab4
--- /dev/null
+++ b/routers/web/org/org.go
@@ -0,0 +1,80 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ // tplCreateOrg template path for create organization
+ tplCreateOrg base.TplName = "org/create"
+)
+
+// Create renders the page for creating an organization
+func Create(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("new_org.title")
+ ctx.Data["DefaultOrgVisibilityMode"] = setting.Service.DefaultOrgVisibilityMode
+ if !ctx.Doer.CanCreateOrganization() {
+ ctx.ServerError("Not allowed", errors.New(ctx.Locale.TrString("org.form.create_org_not_allowed")))
+ return
+ }
+ ctx.HTML(http.StatusOK, tplCreateOrg)
+}
+
+// CreatePost handles the submission of the create organization form
+func CreatePost(ctx *context.Context) {
+ form := *web.GetForm(ctx).(*forms.CreateOrgForm)
+ ctx.Data["Title"] = ctx.Tr("new_org.title")
+
+ if !ctx.Doer.CanCreateOrganization() {
+ ctx.ServerError("Not allowed", errors.New(ctx.Locale.TrString("org.form.create_org_not_allowed")))
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplCreateOrg)
+ return
+ }
+
+ org := &organization.Organization{
+ Name: form.OrgName,
+ IsActive: true,
+ Type: user_model.UserTypeOrganization,
+ Visibility: form.Visibility,
+ RepoAdminChangeTeamAccess: form.RepoAdminChangeTeamAccess,
+ }
+
+ if err := organization.CreateOrganization(ctx, org, ctx.Doer); err != nil {
+ ctx.Data["Err_OrgName"] = true
+ switch {
+ case user_model.IsErrUserAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("form.org_name_been_taken"), tplCreateOrg, &form)
+ case db.IsErrNameReserved(err):
+ ctx.RenderWithErr(ctx.Tr("org.form.name_reserved", err.(db.ErrNameReserved).Name), tplCreateOrg, &form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.RenderWithErr(ctx.Tr("org.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplCreateOrg, &form)
+ case organization.IsErrUserNotAllowedCreateOrg(err):
+ ctx.RenderWithErr(ctx.Tr("org.form.create_org_not_allowed"), tplCreateOrg, &form)
+ default:
+ ctx.ServerError("CreateOrganization", err)
+ }
+ return
+ }
+ log.Trace("Organization created: %s", org.Name)
+
+ ctx.Redirect(org.AsUser().DashboardLink())
+}
diff --git a/routers/web/org/org_labels.go b/routers/web/org/org_labels.go
new file mode 100644
index 0000000..02eae80
--- /dev/null
+++ b/routers/web/org/org_labels.go
@@ -0,0 +1,116 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/label"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// RetrieveLabels finds all labels of an organization
+func RetrieveLabels(ctx *context.Context) {
+ labels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Org.Organization.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("RetrieveLabels.GetLabels", err)
+ return
+ }
+ for _, l := range labels {
+ l.CalOpenIssues()
+ }
+ ctx.Data["Labels"] = labels
+ ctx.Data["NumLabels"] = len(labels)
+ ctx.Data["SortType"] = ctx.FormString("sort")
+}
+
+// NewLabel creates a new label for an organization
+func NewLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsLabels"] = true
+ ctx.Data["PageIsOrgSettings"] = true
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/labels")
+ return
+ }
+
+ l := &issues_model.Label{
+ OrgID: ctx.Org.Organization.ID,
+ Name: form.Title,
+ Exclusive: form.Exclusive,
+ Description: form.Description,
+ Color: form.Color,
+ }
+ if err := issues_model.NewLabel(ctx, l); err != nil {
+ ctx.ServerError("NewLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/labels")
+}
+
+// UpdateLabel updates a label's name, color, and other properties
+func UpdateLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ l, err := issues_model.GetLabelInOrgByID(ctx, ctx.Org.Organization.ID, form.ID)
+ if err != nil {
+ switch {
+ case issues_model.IsErrOrgLabelNotExist(err):
+ ctx.Error(http.StatusNotFound)
+ default:
+ ctx.ServerError("UpdateLabel", err)
+ }
+ return
+ }
+
+ l.Name = form.Title
+ l.Exclusive = form.Exclusive
+ l.Description = form.Description
+ l.Color = form.Color
+ l.SetArchived(form.IsArchived)
+ if err := issues_model.UpdateLabel(ctx, l); err != nil {
+ ctx.ServerError("UpdateLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/labels")
+}
+
+// DeleteLabel deletes a label
+func DeleteLabel(ctx *context.Context) {
+ if err := issues_model.DeleteLabel(ctx, ctx.Org.Organization.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteLabel: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.issues.label_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/settings/labels")
+}
+
+// InitializeLabels initializes labels for an organization from a label template
+func InitializeLabels(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.InitializeLabelsForm)
+ if ctx.HasError() {
+ ctx.Redirect(ctx.Org.OrgLink + "/labels")
+ return
+ }
+
+ if err := repo_module.InitializeLabels(ctx, ctx.Org.Organization.ID, form.TemplateName, true); err != nil {
+ if label.IsErrTemplateLoad(err) {
+ originalErr := err.(label.ErrTemplateLoad).OriginalError
+ ctx.Flash.Error(ctx.Tr("repo.issues.label_templates.fail_to_load_file", form.TemplateName, originalErr))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/labels")
+ return
+ }
+ ctx.ServerError("InitializeLabels", err)
+ return
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/labels")
+}
diff --git a/routers/web/org/projects.go b/routers/web/org/projects.go
new file mode 100644
index 0000000..64d233f
--- /dev/null
+++ b/routers/web/org/projects.go
@@ -0,0 +1,610 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ project_model "code.gitea.io/gitea/models/project"
+ attachment_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/web"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplProjects base.TplName = "org/projects/list"
+ tplProjectsNew base.TplName = "org/projects/new"
+ tplProjectsView base.TplName = "org/projects/view"
+)
+
+// MustEnableProjects checks whether projects are enabled in the settings
+func MustEnableProjects(ctx *context.Context) {
+ if unit.TypeProjects.UnitGlobalDisabled() {
+ ctx.NotFound("EnableProjects", nil)
+ return
+ }
+}
+
+// Projects renders the home page of projects
+func Projects(ctx *context.Context) {
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ ctx.Data["Title"] = ctx.Tr("repo.projects")
+
+ sortType := ctx.FormTrim("sort")
+
+ isShowClosed := strings.ToLower(ctx.FormTrim("state")) == "closed"
+ keyword := ctx.FormTrim("q")
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ var projectType project_model.Type
+ if ctx.ContextUser.IsOrganization() {
+ projectType = project_model.TypeOrganization
+ } else {
+ projectType = project_model.TypeIndividual
+ }
+ projects, total, err := db.FindAndCount[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ OwnerID: ctx.ContextUser.ID,
+ IsClosed: optional.Some(isShowClosed),
+ OrderBy: project_model.GetSearchOrderByBySortType(sortType),
+ Type: projectType,
+ Title: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("FindProjects", err)
+ return
+ }
+
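+	// Count the projects in the opposite state so both the open and closed counters can be shown.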
+ opTotal, err := db.Count[project_model.Project](ctx, project_model.SearchOptions{
+ OwnerID: ctx.ContextUser.ID,
+ IsClosed: optional.Some(!isShowClosed),
+ Type: projectType,
+ })
+ if err != nil {
+ ctx.ServerError("CountProjects", err)
+ return
+ }
+
+ if isShowClosed {
+ ctx.Data["OpenCount"] = opTotal
+ ctx.Data["ClosedCount"] = total
+ } else {
+ ctx.Data["OpenCount"] = total
+ ctx.Data["ClosedCount"] = opTotal
+ }
+
+ ctx.Data["Projects"] = projects
+ shared_user.RenderUserHeader(ctx)
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ for _, project := range projects {
+ project.RenderedContent = templates.RenderMarkdownToHtml(ctx, project.Description)
+ }
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+	numPages := 0
+	if total > 0 {
+		numPages = (int(total) - 1) / setting.UI.IssuePagingNum
+	}
+
+ pager := context.NewPagination(int(total), setting.UI.IssuePagingNum, page, numPages)
+ pager.AddParam(ctx, "state", "State")
+ ctx.Data["Page"] = pager
+
+ ctx.Data["CanWriteProjects"] = canWriteProjects(ctx)
+ ctx.Data["IsShowClosed"] = isShowClosed
+ ctx.Data["PageIsViewProjects"] = true
+ ctx.Data["SortType"] = sortType
+
+ ctx.HTML(http.StatusOK, tplProjects)
+}
+
+func canWriteProjects(ctx *context.Context) bool {
+ if ctx.ContextUser.IsOrganization() {
+ return ctx.Org.CanWriteUnit(ctx, unit.TypeProjects)
+ }
+ return ctx.Doer != nil && ctx.ContextUser.ID == ctx.Doer.ID
+}
+
+// RenderNewProject renders the new project page
+func RenderNewProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+ ctx.Data["TemplateConfigs"] = project_model.GetTemplateConfigs()
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CanWriteProjects"] = canWriteProjects(ctx)
+ ctx.Data["PageIsViewProjects"] = true
+ ctx.Data["HomeLink"] = ctx.ContextUser.HomeLink()
+ ctx.Data["CancelLink"] = ctx.ContextUser.HomeLink() + "/-/projects"
+ shared_user.RenderUserHeader(ctx)
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// NewProjectPost creates a new project
+func NewProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+ shared_user.RenderUserHeader(ctx)
+
+ if ctx.HasError() {
+ RenderNewProject(ctx)
+ return
+ }
+
+ newProject := project_model.Project{
+ OwnerID: ctx.ContextUser.ID,
+ Title: form.Title,
+ Description: form.Content,
+ CreatorID: ctx.Doer.ID,
+ TemplateType: form.TemplateType,
+ CardType: form.CardType,
+ }
+
+ if ctx.ContextUser.IsOrganization() {
+ newProject.Type = project_model.TypeOrganization
+ } else {
+ newProject.Type = project_model.TypeIndividual
+ }
+
+ if err := project_model.NewProject(ctx, &newProject); err != nil {
+ ctx.ServerError("NewProject", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.create_success", form.Title))
+ ctx.Redirect(ctx.ContextUser.HomeLink() + "/-/projects")
+}
+
+// ChangeProjectStatus updates the status of a project between "open" and "close"
+func ChangeProjectStatus(ctx *context.Context) {
+ var toClose bool
+ switch ctx.Params(":action") {
+ case "open":
+ toClose = false
+ case "close":
+ toClose = true
+ default:
+ ctx.JSONRedirect(ctx.ContextUser.HomeLink() + "/-/projects")
+ return
+ }
+ id := ctx.ParamsInt64(":id")
+
+ if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, 0, id, toClose); err != nil {
+ ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ ctx.JSONRedirect(fmt.Sprintf("%s/-/projects/%d", ctx.ContextUser.HomeLink(), id))
+}
+
+// DeleteProject deletes a project
+func DeleteProject(ctx *context.Context) {
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if p.OwnerID != ctx.ContextUser.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ if err := project_model.DeleteProjectByID(ctx, p.ID); err != nil {
+ ctx.Flash.Error("DeleteProjectByID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.projects.deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.ContextUser.HomeLink() + "/-/projects")
+}
+
+// RenderEditProject renders the page for editing a project
+func RenderEditProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["PageIsViewProjects"] = true
+ ctx.Data["CanWriteProjects"] = canWriteProjects(ctx)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+
+ shared_user.RenderUserHeader(ctx)
+
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if p.OwnerID != ctx.ContextUser.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ctx.Data["projectID"] = p.ID
+ ctx.Data["title"] = p.Title
+ ctx.Data["content"] = p.Description
+ ctx.Data["redirect"] = ctx.FormString("redirect")
+ ctx.Data["HomeLink"] = ctx.ContextUser.HomeLink()
+ ctx.Data["card_type"] = p.CardType
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/-/projects/%d", ctx.ContextUser.HomeLink(), p.ID)
+
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// EditProjectPost handles the submission of the edit project form
+func EditProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ projectID := ctx.ParamsInt64(":id")
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["PageIsViewProjects"] = true
+ ctx.Data["CanWriteProjects"] = canWriteProjects(ctx)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/-/projects/%d", ctx.ContextUser.HomeLink(), projectID)
+
+ shared_user.RenderUserHeader(ctx)
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+ return
+ }
+
+ p, err := project_model.GetProjectByID(ctx, projectID)
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if p.OwnerID != ctx.ContextUser.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ p.Title = form.Title
+ p.Description = form.Content
+ p.CardType = form.CardType
+ if err = project_model.UpdateProject(ctx, p); err != nil {
+ ctx.ServerError("UpdateProjects", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.edit_success", p.Title))
+ if ctx.FormString("redirect") == "project" {
+ ctx.Redirect(p.Link(ctx))
+ } else {
+ ctx.Redirect(ctx.ContextUser.HomeLink() + "/-/projects")
+ }
+}
+
+// ViewProject renders the board view of a project
+func ViewProject(ctx *context.Context) {
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if project.OwnerID != ctx.ContextUser.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ columns, err := project.GetColumns(ctx)
+ if err != nil {
+ ctx.ServerError("GetProjectColumns", err)
+ return
+ }
+
+ issuesMap, err := issues_model.LoadIssuesFromColumnList(ctx, columns)
+ if err != nil {
+ ctx.ServerError("LoadIssuesOfColumns", err)
+ return
+ }
+
+ if project.CardType != project_model.CardTypeTextOnly {
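+		// Cards that can show images need the latest image attachments of each issue.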
+ issuesAttachmentMap := make(map[int64][]*attachment_model.Attachment)
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ if issueAttachment, err := attachment_model.GetAttachmentsByIssueIDImagesLatest(ctx, issue.ID); err == nil {
+ issuesAttachmentMap[issue.ID] = issueAttachment
+ }
+ }
+ }
+ ctx.Data["issuesAttachmentMap"] = issuesAttachmentMap
+ }
+
+ linkedPrsMap := make(map[int64][]*issues_model.Issue)
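+	// Collect pull requests referenced from each issue's comments so they can be shown alongside the issue.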
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ var referencedIDs []int64
+ for _, comment := range issue.Comments {
+ if comment.RefIssueID != 0 && comment.RefIsPull {
+ referencedIDs = append(referencedIDs, comment.RefIssueID)
+ }
+ }
+
+ if len(referencedIDs) > 0 {
+ if linkedPrs, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ IssueIDs: referencedIDs,
+ IsPull: optional.Some(true),
+ }); err == nil {
+ linkedPrsMap[issue.ID] = linkedPrs
+ }
+ }
+ }
+ }
+
+ project.RenderedContent = templates.RenderMarkdownToHtml(ctx, project.Description)
+ ctx.Data["LinkedPRs"] = linkedPrsMap
+ ctx.Data["PageIsViewProjects"] = true
+ ctx.Data["CanWriteProjects"] = canWriteProjects(ctx)
+ ctx.Data["Project"] = project
+ ctx.Data["IssuesMap"] = issuesMap
+ ctx.Data["Columns"] = columns
+ shared_user.RenderUserHeader(ctx)
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplProjectsView)
+}
+
+// DeleteProjectColumn allows for the deletion of a project column
+func DeleteProjectColumn(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+
+ pb, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return
+ }
+ if pb.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", pb.ID, project.ID),
+ })
+ return
+ }
+
+ if project.OwnerID != ctx.ContextUser.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Owner[%d] as expected", pb.ID, ctx.ContextUser.ID),
+ })
+ return
+ }
+
+ if err := project_model.DeleteColumnByID(ctx, ctx.ParamsInt64(":columnID")); err != nil {
+ ctx.ServerError("DeleteProjectColumnByID", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// AddColumnToProjectPost allows a new column to be added to a project.
+func AddColumnToProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+
+ if err := project_model.NewColumn(ctx, &project_model.Column{
+ ProjectID: project.ID,
+ Title: form.Title,
+ Color: form.Color,
+ CreatorID: ctx.Doer.ID,
+ }); err != nil {
+ ctx.ServerError("NewProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// CheckProjectColumnChangePermissions checks whether the doer may change the given project column
+func CheckProjectColumnChangePermissions(ctx *context.Context) (*project_model.Project, *project_model.Column) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return nil, nil
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return nil, nil
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return nil, nil
+ }
+ if column.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", column.ID, project.ID),
+ })
+ return nil, nil
+ }
+
+ if project.OwnerID != ctx.ContextUser.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+			"message": fmt.Sprintf("ProjectColumn[%d] is not in Owner[%d] as expected", column.ID, ctx.ContextUser.ID),
+ })
+ return nil, nil
+ }
+ return project, column
+}
+
+// EditProjectColumn updates a project column's title, color, and sorting
+func EditProjectColumn(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+ _, column := CheckProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if form.Title != "" {
+ column.Title = form.Title
+ }
+ column.Color = form.Color
+ if form.Sorting != 0 {
+ column.Sorting = form.Sorting
+ }
+
+ if err := project_model.UpdateColumn(ctx, column); err != nil {
+ ctx.ServerError("UpdateProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// SetDefaultProjectColumn sets the default column for uncategorized issues/pulls
+func SetDefaultProjectColumn(ctx *context.Context) {
+ project, column := CheckProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := project_model.SetDefaultColumn(ctx, project.ID, column.ID); err != nil {
+ ctx.ServerError("SetDefaultColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// MoveIssues moves or keeps issues in a column and sorts them inside that column
+func MoveIssues(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if project.OwnerID != ctx.ContextUser.ID {
+ ctx.NotFound("InvalidRepoID", nil)
+ return
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectColumn", project_model.IsErrProjectColumnNotExist, err)
+ return
+ }
+
+ if column.ProjectID != project.ID {
+ ctx.NotFound("ColumnNotInProject", nil)
+ return
+ }
+
+ type movedIssuesForm struct {
+ Issues []struct {
+ IssueID int64 `json:"issueID"`
+ Sorting int64 `json:"sorting"`
+ } `json:"issues"`
+ }
+
+ form := &movedIssuesForm{}
+ if err = json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.ServerError("DecodeMovedIssuesForm", err)
+ return
+ }
+
+ issueIDs := make([]int64, 0, len(form.Issues))
+ sortedIssueIDs := make(map[int64]int64)
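+	// sortedIssueIDs maps the new sorting position of each issue to its ID.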
+ for _, issue := range form.Issues {
+ issueIDs = append(issueIDs, issue.IssueID)
+ sortedIssueIDs[issue.Sorting] = issue.IssueID
+ }
+ movedIssues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ ctx.NotFoundOrServerError("GetIssueByID", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if len(movedIssues) != len(form.Issues) {
+ ctx.ServerError("some issues do not exist", errors.New("some issues do not exist"))
+ return
+ }
+
+ if _, err = movedIssues.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("LoadRepositories", err)
+ return
+ }
+
+ for _, issue := range movedIssues {
+ if issue.RepoID != project.RepoID && issue.Repo.OwnerID != project.OwnerID {
+ ctx.ServerError("Some issue's repoID is not equal to project's repoID", errors.New("Some issue's repoID is not equal to project's repoID"))
+ return
+ }
+ }
+
+ if err = project_model.MoveIssuesOnProjectColumn(ctx, column, sortedIssueIDs); err != nil {
+ ctx.ServerError("MoveIssuesOnProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/org/projects_test.go b/routers/web/org/projects_test.go
new file mode 100644
index 0000000..ab419cc
--- /dev/null
+++ b/routers/web/org/projects_test.go
@@ -0,0 +1,28 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org_test
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/routers/web/org"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCheckProjectColumnChangePermissions(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/-/projects/4/4")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.ContextUser = ctx.Doer // user2
+ ctx.SetParams(":id", "4")
+ ctx.SetParams(":columnID", "4")
+
+ project, column := org.CheckProjectColumnChangePermissions(ctx)
+ assert.NotNil(t, project)
+ assert.NotNil(t, column)
+ assert.False(t, ctx.Written())
+}
diff --git a/routers/web/org/setting.go b/routers/web/org/setting.go
new file mode 100644
index 0000000..0be734a
--- /dev/null
+++ b/routers/web/org/setting.go
@@ -0,0 +1,258 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "net/http"
+ "net/url"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ user_setting "code.gitea.io/gitea/routers/web/user/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ org_service "code.gitea.io/gitea/services/org"
+ repo_service "code.gitea.io/gitea/services/repository"
+ user_service "code.gitea.io/gitea/services/user"
+ webhook_service "code.gitea.io/gitea/services/webhook"
+)
+
+const (
+ // tplSettingsOptions template path for render settings
+ tplSettingsOptions base.TplName = "org/settings/options"
+ // tplSettingsDelete template path for render delete repository
+ tplSettingsDelete base.TplName = "org/settings/delete"
+ // tplSettingsHooks template path for render hook settings
+ tplSettingsHooks base.TplName = "org/settings/hooks"
+ // tplSettingsLabels template path for render labels settings
+ tplSettingsLabels base.TplName = "org/settings/labels"
+)
+
+// Settings renders the main settings page
+func Settings(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("org.settings")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsOptions"] = true
+ ctx.Data["CurrentVisibility"] = ctx.Org.Organization.Visibility
+ ctx.Data["RepoAdminChangeTeamAccess"] = ctx.Org.Organization.RepoAdminChangeTeamAccess
+ ctx.Data["ContextUser"] = ctx.ContextUser
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+}
+
+// SettingsPost handles the submission of the settings form
+func SettingsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateOrgSettingForm)
+ ctx.Data["Title"] = ctx.Tr("org.settings")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsOptions"] = true
+ ctx.Data["CurrentVisibility"] = ctx.Org.Organization.Visibility
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+ return
+ }
+
+ org := ctx.Org.Organization
+
+ if org.Name != form.Name {
+ if err := user_service.RenameUser(ctx, org.AsUser(), form.Name); err != nil {
+ if user_model.IsErrUserAlreadyExist(err) {
+ ctx.Data["Err_Name"] = true
+ ctx.RenderWithErr(ctx.Tr("form.username_been_taken"), tplSettingsOptions, &form)
+ } else if db.IsErrNameReserved(err) {
+ ctx.Data["Err_Name"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplSettingsOptions, &form)
+ } else if db.IsErrNamePatternNotAllowed(err) {
+ ctx.Data["Err_Name"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplSettingsOptions, &form)
+ } else {
+ ctx.ServerError("RenameUser", err)
+ }
+ return
+ }
+
+ ctx.Org.OrgLink = setting.AppSubURL + "/org/" + url.PathEscape(org.Name)
+ }
+
+ if form.Email != "" {
+ if err := user_service.ReplacePrimaryEmailAddress(ctx, org.AsUser(), form.Email); err != nil {
+ ctx.Data["Err_Email"] = true
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplSettingsOptions, &form)
+ return
+ }
+ }
+
+ opts := &user_service.UpdateOptions{
+ FullName: optional.Some(form.FullName),
+ Description: optional.Some(form.Description),
+ Website: optional.Some(form.Website),
+ Location: optional.Some(form.Location),
+ Visibility: optional.Some(form.Visibility),
+ RepoAdminChangeTeamAccess: optional.Some(form.RepoAdminChangeTeamAccess),
+ }
+ if ctx.Doer.IsAdmin {
+ opts.MaxRepoCreation = optional.Some(form.MaxRepoCreation)
+ }
+
+ visibilityChanged := org.Visibility != form.Visibility
+
+ if err := user_service.UpdateUser(ctx, org.AsUser(), opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ // update forks visibility
+ if visibilityChanged {
+ repos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{
+ Actor: org.AsUser(), Private: true, ListOptions: db.ListOptions{Page: 1, PageSize: org.NumRepos},
+ })
+ if err != nil {
+ ctx.ServerError("GetRepositories", err)
+ return
+ }
+ for _, repo := range repos {
+ repo.OwnerName = org.Name
+ if err := repo_service.UpdateRepository(ctx, repo, true); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ }
+ }
+
+ log.Trace("Organization setting updated: %s", org.Name)
+ ctx.Flash.Success(ctx.Tr("org.settings.update_setting_success"))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings")
+}
+
+// SettingsAvatar handles changing the avatar on the settings page
+func SettingsAvatar(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AvatarForm)
+ form.Source = forms.AvatarLocal
+ if err := user_setting.UpdateAvatarSetting(ctx, form, ctx.Org.Organization.AsUser()); err != nil {
+ ctx.Flash.Error(err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("org.settings.update_avatar_success"))
+ }
+
+ ctx.Redirect(ctx.Org.OrgLink + "/settings")
+}
+
+// SettingsDeleteAvatar handles deleting the avatar on the settings page
+func SettingsDeleteAvatar(ctx *context.Context) {
+ if err := user_service.DeleteAvatar(ctx, ctx.Org.Organization.AsUser()); err != nil {
+ ctx.Flash.Error(err.Error())
+ }
+
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/settings")
+}
+
+// SettingsDelete handles deleting an organization
+func SettingsDelete(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("org.settings")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsDelete"] = true
+
+ if ctx.Req.Method == "POST" {
+ if ctx.Org.Organization.Name != ctx.FormString("org_name") {
+ ctx.Data["Err_OrgName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_org_name"), tplSettingsDelete, nil)
+ return
+ }
+
+ if err := org_service.DeleteOrganization(ctx, ctx.Org.Organization, false); err != nil {
+ if models.IsErrUserOwnRepos(err) {
+ ctx.Flash.Error(ctx.Tr("form.org_still_own_repo"))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/delete")
+ } else if models.IsErrUserOwnPackages(err) {
+ ctx.Flash.Error(ctx.Tr("form.org_still_own_packages"))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/delete")
+ } else {
+ ctx.ServerError("DeleteOrganization", err)
+ }
+ } else {
+ log.Trace("Organization deleted: %s", ctx.Org.Organization.Name)
+ ctx.Redirect(setting.AppSubURL + "/")
+ }
+ return
+ }
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsDelete)
+}
+
+// Webhooks renders the webhook list page
+func Webhooks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("org.settings")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["BaseLink"] = ctx.Org.OrgLink + "/settings/hooks"
+ ctx.Data["BaseLinkNew"] = ctx.Org.OrgLink + "/settings/hooks"
+ ctx.Data["WebhookList"] = webhook_service.List()
+ ctx.Data["Description"] = ctx.Tr("org.settings.hooks_desc")
+
+ ws, err := db.Find[webhook.Webhook](ctx, webhook.ListWebhookOptions{OwnerID: ctx.Org.Organization.ID})
+ if err != nil {
+ ctx.ServerError("ListWebhooksByOpts", err)
+ return
+ }
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.Data["Webhooks"] = ws
+ ctx.HTML(http.StatusOK, tplSettingsHooks)
+}
+
+// DeleteWebhook handles deleting a webhook
+func DeleteWebhook(ctx *context.Context) {
+ if err := webhook.DeleteWebhookByOwnerID(ctx, ctx.Org.Organization.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteWebhookByOwnerID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/settings/hooks")
+}
+
+// Labels renders the organization labels page
+func Labels(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsOrgSettingsLabels"] = true
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsLabels)
+}
diff --git a/routers/web/org/setting/blocked_users.go b/routers/web/org/setting/blocked_users.go
new file mode 100644
index 0000000..0c7f245
--- /dev/null
+++ b/routers/web/org/setting/blocked_users.go
@@ -0,0 +1,85 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+const tplBlockedUsers = "org/settings/blocked_users"
+
+// BlockedUsers renders the blocked users page.
+func BlockedUsers(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.blocked_users")
+ ctx.Data["PageIsSettingsBlockedUsers"] = true
+
+ blockedUsers, err := user_model.ListBlockedUsers(ctx, ctx.Org.Organization.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("ListBlockedUsers", err)
+ return
+ }
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.Data["BlockedUsers"] = blockedUsers
+
+ ctx.HTML(http.StatusOK, tplBlockedUsers)
+}
+
+// BlockedUsersBlock blocks a particular user from the organization.
+func BlockedUsersBlock(ctx *context.Context) {
+ uname := strings.ToLower(ctx.FormString("uname"))
+ u, err := user_model.GetUserByName(ctx, uname)
+ if err != nil {
+ ctx.ServerError("GetUserByName", err)
+ return
+ }
+
+ if u.IsOrganization() {
+		ctx.ServerError("IsOrganization", fmt.Errorf("%s is an organization, not a user", u.Name))
+ return
+ }
+
+ if err := user_service.BlockUser(ctx, ctx.Org.Organization.ID, u.ID); err != nil {
+ ctx.ServerError("BlockUser", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.user_block_success"))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/blocked_users")
+}
+
+// BlockedUsersUnblock unblocks a particular user from the organization.
+func BlockedUsersUnblock(ctx *context.Context) {
+ u, err := user_model.GetUserByID(ctx, ctx.FormInt64("user_id"))
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ if u.IsOrganization() {
+		ctx.ServerError("IsOrganization", fmt.Errorf("%s is an organization, not a user", u.Name))
+ return
+ }
+
+ if err := user_model.UnblockUser(ctx, ctx.Org.Organization.ID, u.ID); err != nil {
+ ctx.ServerError("UnblockUser", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.user_unblock_success"))
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/blocked_users")
+}
diff --git a/routers/web/org/setting/runners.go b/routers/web/org/setting/runners.go
new file mode 100644
index 0000000..fe05709
--- /dev/null
+++ b/routers/web/org/setting/runners.go
@@ -0,0 +1,12 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/services/context"
+)
+
+func RedirectToDefaultSetting(ctx *context.Context) {
+ ctx.Redirect(ctx.Org.OrgLink + "/settings/actions/runners")
+}
diff --git a/routers/web/org/setting_oauth2.go b/routers/web/org/setting_oauth2.go
new file mode 100644
index 0000000..7f85579
--- /dev/null
+++ b/routers/web/org/setting_oauth2.go
@@ -0,0 +1,102 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ user_setting "code.gitea.io/gitea/routers/web/user/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsApplications base.TplName = "org/settings/applications"
+ tplSettingsOAuthApplicationEdit base.TplName = "org/settings/applications_oauth2_edit"
+)
+
+func newOAuth2CommonHandlers(org *context.Organization) *user_setting.OAuth2CommonHandlers {
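+	// Reuse the user settings OAuth2 handlers, scoped to the organization's ID and settings URLs.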
+ return &user_setting.OAuth2CommonHandlers{
+ OwnerID: org.Organization.ID,
+ BasePathList: fmt.Sprintf("%s/org/%s/settings/applications", setting.AppSubURL, org.Organization.Name),
+ BasePathEditPrefix: fmt.Sprintf("%s/org/%s/settings/applications/oauth2", setting.AppSubURL, org.Organization.Name),
+ TplAppEdit: tplSettingsOAuthApplicationEdit,
+ }
+}
+
+// Applications renders the org applications page (for orgs, only OAuth2 applications exist at the moment)
+func Applications(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ apps, err := db.Find[auth.OAuth2Application](ctx, auth.FindOAuth2ApplicationsOptions{
+ OwnerID: ctx.Org.Organization.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetOAuth2ApplicationsByUserID", err)
+ return
+ }
+ ctx.Data["Applications"] = apps
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsApplications)
+}
+
+// OAuthApplicationsPost handles adding an OAuth2 application
+func OAuthApplicationsPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Org)
+ oa.AddApp(ctx)
+}
+
+// OAuth2ApplicationShow displays the given application
+func OAuth2ApplicationShow(ctx *context.Context) {
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Org)
+ oa.EditShow(ctx)
+}
+
+// OAuth2ApplicationEdit handles editing an OAuth2 application
+func OAuth2ApplicationEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Org)
+ oa.EditSave(ctx)
+}
+
+// OAuthApplicationsRegenerateSecret handles the post request for regenerating the secret
+func OAuthApplicationsRegenerateSecret(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Org)
+ oa.RegenerateSecret(ctx)
+}
+
+// DeleteOAuth2Application deletes the given oauth2 application
+func DeleteOAuth2Application(ctx *context.Context) {
+ oa := newOAuth2CommonHandlers(ctx.Org)
+ oa.DeleteApp(ctx)
+}
+
+// TODO: revokes the grant with the given id
diff --git a/routers/web/org/setting_packages.go b/routers/web/org/setting_packages.go
new file mode 100644
index 0000000..af9836e
--- /dev/null
+++ b/routers/web/org/setting_packages.go
@@ -0,0 +1,131 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared "code.gitea.io/gitea/routers/web/shared/packages"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsPackages base.TplName = "org/settings/packages"
+ tplSettingsPackagesRuleEdit base.TplName = "org/settings/packages_cleanup_rules_edit"
+ tplSettingsPackagesRulePreview base.TplName = "org/settings/packages_cleanup_rules_preview"
+)
+
+func Packages(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ shared.SetPackagesContext(ctx, ctx.ContextUser)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackages)
+}
+
+func PackagesRuleAdd(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ shared.SetRuleAddContext(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRuleEdit)
+}
+
+func PackagesRuleEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ shared.SetRuleEditContext(ctx, ctx.ContextUser)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRuleEdit)
+}
+
+func PackagesRuleAddPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.PerformRuleAddPost(
+ ctx,
+ ctx.ContextUser,
+ fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name),
+ tplSettingsPackagesRuleEdit,
+ )
+}
+
+func PackagesRuleEditPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.PerformRuleEditPost(
+ ctx,
+ ctx.ContextUser,
+ fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name),
+ tplSettingsPackagesRuleEdit,
+ )
+}
+
+func PackagesRulePreview(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ shared.SetRulePreviewContext(ctx, ctx.ContextUser)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
+}
+
+func InitializeCargoIndex(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.InitializeCargoIndex(ctx, ctx.ContextUser)
+
+ ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
+}
+
+func RebuildCargoIndex(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsOrgSettings"] = true
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.RebuildCargoIndex(ctx, ctx.ContextUser)
+
+ ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name))
+}
diff --git a/routers/web/org/teams.go b/routers/web/org/teams.go
new file mode 100644
index 0000000..45c3674
--- /dev/null
+++ b/routers/web/org/teams.go
@@ -0,0 +1,628 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package org
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ org_service "code.gitea.io/gitea/services/org"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ // tplTeams template path for teams list page
+ tplTeams base.TplName = "org/team/teams"
+ // tplTeamNew template path for create new team page
+ tplTeamNew base.TplName = "org/team/new"
+ // tplTeamMembers template path for showing team members page
+ tplTeamMembers base.TplName = "org/team/members"
+ // tplTeamRepositories template path for showing team repositories page
+ tplTeamRepositories base.TplName = "org/team/repositories"
+ // tplTeamInvite template path for team invites page
+ tplTeamInvite base.TplName = "org/team/invite"
+)
+
+// Teams renders the teams list page
+func Teams(ctx *context.Context) {
+ org := ctx.Org.Organization
+ ctx.Data["Title"] = org.FullName
+ ctx.Data["PageIsOrgTeams"] = true
+
+ for _, t := range ctx.Org.Teams {
+ if err := t.LoadMembers(ctx); err != nil {
+ ctx.ServerError("GetMembers", err)
+ return
+ }
+ }
+ ctx.Data["Teams"] = ctx.Org.Teams
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplTeams)
+}
+
+// TeamsAction handles the join, leave, remove, add, and remove_invite operations on a team
+func TeamsAction(ctx *context.Context) {
+ page := ctx.FormString("page")
+ var err error
+ switch ctx.Params(":action") {
+ case "join":
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ err = models.AddTeamMember(ctx, ctx.Org.Team, ctx.Doer.ID)
+ case "leave":
+ err = models.RemoveTeamMember(ctx, ctx.Org.Team, ctx.Doer.ID)
+ if err != nil {
+ if org_model.IsErrLastOrgOwner(err) {
+ ctx.Flash.Error(ctx.Tr("form.last_org_owner"))
+ } else {
+ log.Error("Action(%s): %v", ctx.Params(":action"), err)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": false,
+ "err": err.Error(),
+ })
+ return
+ }
+ }
+ checkIsOrgMemberAndRedirect(ctx, ctx.Org.OrgLink+"/teams/")
+ return
+ case "remove":
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ uid := ctx.FormInt64("uid")
+ if uid == 0 {
+ ctx.Redirect(ctx.Org.OrgLink + "/teams")
+ return
+ }
+
+ err = models.RemoveTeamMember(ctx, ctx.Org.Team, uid)
+ if err != nil {
+ if org_model.IsErrLastOrgOwner(err) {
+ ctx.Flash.Error(ctx.Tr("form.last_org_owner"))
+ } else {
+ log.Error("Action(%s): %v", ctx.Params(":action"), err)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": false,
+ "err": err.Error(),
+ })
+ return
+ }
+ }
+ checkIsOrgMemberAndRedirect(ctx, ctx.Org.OrgLink+"/teams/"+url.PathEscape(ctx.Org.Team.LowerName))
+ return
+ case "add":
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ uname := strings.ToLower(ctx.FormString("uname"))
+ var u *user_model.User
+ u, err = user_model.GetUserByName(ctx, uname)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ if setting.MailService != nil && user_model.ValidateEmail(uname) == nil {
+ if err := org_service.CreateTeamInvite(ctx, ctx.Doer, ctx.Org.Team, uname); err != nil {
+ if org_model.IsErrTeamInviteAlreadyExist(err) {
+ ctx.Flash.Error(ctx.Tr("form.duplicate_invite_to_team"))
+ } else if org_model.IsErrUserEmailAlreadyAdded(err) {
+ ctx.Flash.Error(ctx.Tr("org.teams.add_duplicate_users"))
+ } else {
+ ctx.ServerError("CreateTeamInvite", err)
+ return
+ }
+ }
+ } else {
+ ctx.Flash.Error(ctx.Tr("form.user_not_exist"))
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName))
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return
+ }
+
+ if u.IsOrganization() {
+ ctx.Flash.Error(ctx.Tr("form.cannot_add_org_to_team"))
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName))
+ return
+ }
+
+ if ctx.Org.Team.IsMember(ctx, u.ID) {
+ ctx.Flash.Error(ctx.Tr("org.teams.add_duplicate_users"))
+ } else {
+ err = models.AddTeamMember(ctx, ctx.Org.Team, u.ID)
+ }
+
+ page = "team"
+ case "remove_invite":
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ iid := ctx.FormInt64("iid")
+ if iid == 0 {
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName))
+ return
+ }
+
+ if err := org_model.RemoveInviteByID(ctx, iid, ctx.Org.Team.ID); err != nil {
+ log.Error("Action(%s): %v", ctx.Params(":action"), err)
+ ctx.ServerError("RemoveInviteByID", err)
+ return
+ }
+
+ page = "team"
+ }
+
+ if err != nil {
+ if org_model.IsErrLastOrgOwner(err) {
+ ctx.Flash.Error(ctx.Tr("form.last_org_owner"))
+ } else {
+ log.Error("Action(%s): %v", ctx.Params(":action"), err)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": false,
+ "err": err.Error(),
+ })
+ return
+ }
+ }
+
+ switch page {
+ case "team":
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName))
+ case "home":
+ ctx.Redirect(ctx.Org.Organization.AsUser().HomeLink())
+ default:
+ ctx.Redirect(ctx.Org.OrgLink + "/teams")
+ }
+}
+
+func checkIsOrgMemberAndRedirect(ctx *context.Context, defaultRedirect string) {
+ if isOrgMember, err := org_model.IsOrganizationMember(ctx, ctx.Org.Organization.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("IsOrganizationMember", err)
+ return
+ } else if !isOrgMember {
+ if ctx.Org.Organization.Visibility.IsPrivate() {
+ defaultRedirect = setting.AppSubURL + "/"
+ } else {
+ defaultRedirect = ctx.Org.Organization.HomeLink()
+ }
+ }
+ ctx.JSONRedirect(defaultRedirect)
+}
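+
+// A sketch of the redirect targets chosen above (destinations taken from the branches in the
+// function):
+//
+//	doer is still an org member             -> defaultRedirect (the teams page passed in)
+//	doer no longer a member, org not private -> the organization's home page
+//	doer no longer a member, org private     -> the instance root (AppSubURL + "/")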
+
+// TeamsRepoAction operates on a team's repositories (add, remove, addall, removeall)
+func TeamsRepoAction(ctx *context.Context) {
+ if !ctx.Org.IsOwner {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ var err error
+ action := ctx.Params(":action")
+ switch action {
+ case "add":
+ repoName := path.Base(ctx.FormString("repo_name"))
+ var repo *repo_model.Repository
+ repo, err = repo_model.GetRepositoryByName(ctx, ctx.Org.Organization.ID, repoName)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("org.teams.add_nonexistent_repo"))
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName) + "/repositories")
+ return
+ }
+ ctx.ServerError("GetRepositoryByName", err)
+ return
+ }
+ err = org_service.TeamAddRepository(ctx, ctx.Org.Team, repo)
+ case "remove":
+ err = repo_service.RemoveRepositoryFromTeam(ctx, ctx.Org.Team, ctx.FormInt64("repoid"))
+ case "addall":
+ err = models.AddAllRepositories(ctx, ctx.Org.Team)
+ case "removeall":
+ err = models.RemoveAllRepositories(ctx, ctx.Org.Team)
+ }
+
+ if err != nil {
+ log.Error("Action(%s): '%s' %v", ctx.Params(":action"), ctx.Org.Team.Name, err)
+ ctx.ServerError("TeamsRepoAction", err)
+ return
+ }
+
+ if action == "addall" || action == "removeall" {
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName) + "/repositories")
+ return
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(ctx.Org.Team.LowerName) + "/repositories")
+}
+
+// NewTeam renders the create new team page
+func NewTeam(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Org.Organization.FullName
+ ctx.Data["PageIsOrgTeams"] = true
+ ctx.Data["PageIsOrgTeamsNew"] = true
+ ctx.Data["Team"] = &org_model.Team{}
+ ctx.Data["Units"] = unit_model.Units
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+ ctx.HTML(http.StatusOK, tplTeamNew)
+}
+
+func getUnitPerms(forms url.Values, teamPermission perm.AccessMode) map[unit_model.Type]perm.AccessMode {
+ unitPerms := make(map[unit_model.Type]perm.AccessMode)
+ for _, ut := range unit_model.AllRepoUnitTypes {
+ // Default access mode is none
+ unitPerms[ut] = perm.AccessModeNone
+
+ v, ok := forms[fmt.Sprintf("unit_%d", ut)]
+ if ok {
+ vv, _ := strconv.Atoi(v[0])
+ if teamPermission >= perm.AccessModeAdmin {
+ unitPerms[ut] = teamPermission
+ // Don't allow `TypeExternal{Tracker,Wiki}` to influence this as they can only be set to READ perms.
+ if ut == unit_model.TypeExternalTracker || ut == unit_model.TypeExternalWiki {
+ unitPerms[ut] = perm.AccessModeRead
+ }
+ } else {
+ unitPerms[ut] = perm.AccessMode(vv)
+ if unitPerms[ut] >= perm.AccessModeAdmin {
+ unitPerms[ut] = perm.AccessModeWrite
+ }
+ }
+ }
+ }
+ return unitPerms
+}
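+
+// A minimal sketch of how the submitted form maps to unit permissions, assuming the usual
+// ordering None < Read < Write < Admin of perm.AccessMode (the unit number and form values
+// below are hypothetical):
+//
+//	// teamPermission is read/write: per-unit values are taken from the form, capped at write
+//	form: unit_1=3 -> unitPerms[1] == perm.AccessModeWrite
+//
+//	// teamPermission is admin or owner: every submitted unit inherits the team permission,
+//	// except external tracker/wiki which are forced back to read
+//	form: unit_1=1 -> unitPerms[1] == teamPermission
+//
+// Units that are absent from the form stay at perm.AccessModeNone.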
+
+// NewTeamPost handles the creation of a new team
+func NewTeamPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateTeamForm)
+ includesAllRepositories := form.RepoAccess == "all"
+ p := perm.ParseAccessMode(form.Permission)
+ unitPerms := getUnitPerms(ctx.Req.Form, p)
+ if p < perm.AccessModeAdmin {
+ // if p is less than the admin access mode, it should be a general access mode,
+ // so we calculate the minimal access mode from the units' access modes.
+ p = unit_model.MinUnitAccessMode(unitPerms)
+ }
+
+ t := &org_model.Team{
+ OrgID: ctx.Org.Organization.ID,
+ Name: form.TeamName,
+ Description: form.Description,
+ AccessMode: p,
+ IncludesAllRepositories: includesAllRepositories,
+ CanCreateOrgRepo: form.CanCreateOrgRepo,
+ }
+
+ units := make([]*org_model.TeamUnit, 0, len(unitPerms))
+ for tp, perm := range unitPerms {
+ units = append(units, &org_model.TeamUnit{
+ OrgID: ctx.Org.Organization.ID,
+ Type: tp,
+ AccessMode: perm,
+ })
+ }
+ t.Units = units
+
+ ctx.Data["Title"] = ctx.Org.Organization.FullName
+ ctx.Data["PageIsOrgTeams"] = true
+ ctx.Data["PageIsOrgTeamsNew"] = true
+ ctx.Data["Units"] = unit_model.Units
+ ctx.Data["Team"] = t
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTeamNew)
+ return
+ }
+
+ if t.AccessMode < perm.AccessModeAdmin && len(unitPerms) == 0 {
+ ctx.RenderWithErr(ctx.Tr("form.team_no_units_error"), tplTeamNew, &form)
+ return
+ }
+
+ if err := models.NewTeam(ctx, t); err != nil {
+ ctx.Data["Err_TeamName"] = true
+ switch {
+ case org_model.IsErrTeamAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("form.team_name_been_taken"), tplTeamNew, &form)
+ default:
+ ctx.ServerError("NewTeam", err)
+ }
+ return
+ }
+ log.Trace("Team created: %s/%s", ctx.Org.Organization.Name, t.Name)
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(t.LowerName))
+}
+
+// TeamMembers renders the team members page
+func TeamMembers(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Org.Team.Name
+ ctx.Data["PageIsOrgTeams"] = true
+ ctx.Data["PageIsOrgTeamMembers"] = true
+
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ if err := ctx.Org.Team.LoadMembers(ctx); err != nil {
+ ctx.ServerError("GetMembers", err)
+ return
+ }
+ ctx.Data["Units"] = unit_model.Units
+
+ invites, err := org_model.GetInvitesByTeamID(ctx, ctx.Org.Team.ID)
+ if err != nil {
+ ctx.ServerError("GetInvitesByTeamID", err)
+ return
+ }
+ ctx.Data["Invites"] = invites
+ ctx.Data["IsEmailInviteEnabled"] = setting.MailService != nil
+
+ ctx.HTML(http.StatusOK, tplTeamMembers)
+}
+
+// TeamRepositories shows the repositories of a team
+func TeamRepositories(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Org.Team.Name
+ ctx.Data["PageIsOrgTeams"] = true
+ ctx.Data["PageIsOrgTeamRepos"] = true
+
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ if err := ctx.Org.Team.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("GetRepositories", err)
+ return
+ }
+ ctx.Data["Units"] = unit_model.Units
+ ctx.HTML(http.StatusOK, tplTeamRepositories)
+}
+
+// SearchTeam is the API endpoint for searching teams
+func SearchTeam(ctx *context.Context) {
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ }
+
+ opts := &org_model.SearchTeamOptions{
+ // UserID is not set because the router already requires the doer to be an org admin. Thus, we don't need to restrict the search to teams that the user belongs to
+ Keyword: ctx.FormTrim("q"),
+ OrgID: ctx.Org.Organization.ID,
+ IncludeDesc: ctx.FormString("include_desc") == "" || ctx.FormBool("include_desc"),
+ ListOptions: listOptions,
+ }
+
+ teams, maxResults, err := org_model.SearchTeam(ctx, opts)
+ if err != nil {
+ log.Error("SearchTeam failed: %v", err)
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "ok": false,
+ "error": "SearchTeam internal failure",
+ })
+ return
+ }
+
+ apiTeams, err := convert.ToTeams(ctx, teams, false)
+ if err != nil {
+ log.Error("convert ToTeams failed: %v", err)
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "ok": false,
+ "error": "SearchTeam failed to get units",
+ })
+ return
+ }
+
+ ctx.SetTotalCountHeader(maxResults)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": true,
+ "data": apiTeams,
+ })
+}
+
+// EditTeam renders the team edit page
+func EditTeam(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Org.Organization.FullName
+ ctx.Data["PageIsOrgTeams"] = true
+ if err := ctx.Org.Team.LoadUnits(ctx); err != nil {
+ ctx.ServerError("LoadUnits", err)
+ return
+ }
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+ ctx.Data["Team"] = ctx.Org.Team
+ ctx.Data["Units"] = unit_model.Units
+ ctx.HTML(http.StatusOK, tplTeamNew)
+}
+
+// EditTeamPost handles modifying team information
+func EditTeamPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateTeamForm)
+ t := ctx.Org.Team
+ newAccessMode := perm.ParseAccessMode(form.Permission)
+ unitPerms := getUnitPerms(ctx.Req.Form, newAccessMode)
+ if newAccessMode < perm.AccessModeAdmin {
+ // if newAccessMode is less than the admin access mode, it should be a general access mode,
+ // so we calculate the minimal access mode from the units' access modes.
+ newAccessMode = unit_model.MinUnitAccessMode(unitPerms)
+ }
+ isAuthChanged := false
+ isIncludeAllChanged := false
+ includesAllRepositories := form.RepoAccess == "all"
+
+ ctx.Data["Title"] = ctx.Org.Organization.FullName
+ ctx.Data["PageIsOrgTeams"] = true
+ ctx.Data["Team"] = t
+ ctx.Data["Units"] = unit_model.Units
+
+ if !t.IsOwnerTeam() {
+ t.Name = form.TeamName
+ if t.AccessMode != newAccessMode {
+ isAuthChanged = true
+ t.AccessMode = newAccessMode
+ }
+
+ if t.IncludesAllRepositories != includesAllRepositories {
+ isIncludeAllChanged = true
+ t.IncludesAllRepositories = includesAllRepositories
+ }
+ t.CanCreateOrgRepo = form.CanCreateOrgRepo
+
+ units := make([]*org_model.TeamUnit, 0, len(unitPerms))
+ for tp, perm := range unitPerms {
+ units = append(units, &org_model.TeamUnit{
+ OrgID: t.OrgID,
+ TeamID: t.ID,
+ Type: tp,
+ AccessMode: perm,
+ })
+ }
+ t.Units = units
+ } else {
+ t.CanCreateOrgRepo = true
+ }
+
+ t.Description = form.Description
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTeamNew)
+ return
+ }
+
+ if t.AccessMode < perm.AccessModeAdmin && len(unitPerms) == 0 {
+ ctx.RenderWithErr(ctx.Tr("form.team_no_units_error"), tplTeamNew, &form)
+ return
+ }
+
+ if err := models.UpdateTeam(ctx, t, isAuthChanged, isIncludeAllChanged); err != nil {
+ ctx.Data["Err_TeamName"] = true
+ switch {
+ case org_model.IsErrTeamAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("form.team_name_been_taken"), tplTeamNew, &form)
+ default:
+ ctx.ServerError("UpdateTeam", err)
+ }
+ return
+ }
+ ctx.Redirect(ctx.Org.OrgLink + "/teams/" + url.PathEscape(t.LowerName))
+}
+
+// DeleteTeam handles the delete team request
+func DeleteTeam(ctx *context.Context) {
+ if err := models.DeleteTeam(ctx, ctx.Org.Team); err != nil {
+ ctx.Flash.Error("DeleteTeam: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("org.teams.delete_team_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Org.OrgLink + "/teams")
+}
+
+// TeamInvite renders the team invite page
+func TeamInvite(ctx *context.Context) {
+ invite, org, team, inviter, err := getTeamInviteFromContext(ctx)
+ if err != nil {
+ if org_model.IsErrTeamInviteNotFound(err) {
+ ctx.NotFound("ErrTeamInviteNotFound", err)
+ } else {
+ ctx.ServerError("getTeamInviteFromContext", err)
+ }
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("org.teams.invite_team_member", team.Name)
+ ctx.Data["Invite"] = invite
+ ctx.Data["Organization"] = org
+ ctx.Data["Team"] = team
+ ctx.Data["Inviter"] = inviter
+
+ ctx.HTML(http.StatusOK, tplTeamInvite)
+}
+
+// TeamInvitePost handles the team invitation
+func TeamInvitePost(ctx *context.Context) {
+ invite, org, team, _, err := getTeamInviteFromContext(ctx)
+ if err != nil {
+ if org_model.IsErrTeamInviteNotFound(err) {
+ ctx.NotFound("ErrTeamInviteNotFound", err)
+ } else {
+ ctx.ServerError("getTeamInviteFromContext", err)
+ }
+ return
+ }
+
+ if err := models.AddTeamMember(ctx, team, ctx.Doer.ID); err != nil {
+ ctx.ServerError("AddTeamMember", err)
+ return
+ }
+
+ if err := org_model.RemoveInviteByID(ctx, invite.ID, team.ID); err != nil {
+ log.Error("RemoveInviteByID: %v", err)
+ }
+
+ ctx.Redirect(org.OrganisationLink() + "/teams/" + url.PathEscape(team.LowerName))
+}
+
+func getTeamInviteFromContext(ctx *context.Context) (*org_model.TeamInvite, *org_model.Organization, *org_model.Team, *user_model.User, error) {
+ invite, err := org_model.GetInviteByToken(ctx, ctx.Params("token"))
+ if err != nil {
+ return nil, nil, nil, nil, err
+ }
+
+ inviter, err := user_model.GetUserByID(ctx, invite.InviterID)
+ if err != nil {
+ return nil, nil, nil, nil, err
+ }
+
+ team, err := org_model.GetTeamByID(ctx, invite.TeamID)
+ if err != nil {
+ return nil, nil, nil, nil, err
+ }
+
+ org, err := user_model.GetUserByID(ctx, team.OrgID)
+ if err != nil {
+ return nil, nil, nil, nil, err
+ }
+
+ return invite, org_model.OrgFromUser(org), team, inviter, nil
+}
diff --git a/routers/web/repo/actions/actions.go b/routers/web/repo/actions/actions.go
new file mode 100644
index 0000000..ff3b161
--- /dev/null
+++ b/routers/web/repo/actions/actions.go
@@ -0,0 +1,247 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "slices"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+
+ "github.com/nektos/act/pkg/model"
+)
+
+const (
+ tplListActions base.TplName = "repo/actions/list"
+ tplViewActions base.TplName = "repo/actions/view"
+)
+
+type Workflow struct {
+ Entry git.TreeEntry
+ ErrMsg string
+}
+
+// MustEnableActions checks whether actions are enabled in the settings
+func MustEnableActions(ctx *context.Context) {
+ if !setting.Actions.Enabled {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+
+ if unit.TypeActions.UnitGlobalDisabled() {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+
+ if ctx.Repo.Repository != nil {
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ ctx.NotFound("MustEnableActions", nil)
+ return
+ }
+ }
+}
+
+func List(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageIsActions"] = true
+
+ curWorkflow := ctx.FormString("workflow")
+ ctx.Data["CurWorkflow"] = curWorkflow
+
+ var workflows []Workflow
+ if empty, err := ctx.Repo.GitRepo.IsEmpty(); err != nil {
+ ctx.ServerError("IsEmpty", err)
+ return
+ } else if !empty {
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("GetBranchCommit", err)
+ return
+ }
+ entries, err := actions.ListWorkflows(commit)
+ if err != nil {
+ ctx.ServerError("ListWorkflows", err)
+ return
+ }
+
+ // Get all runner labels
+ runners, err := db.Find[actions_model.ActionRunner](ctx, actions_model.FindRunnerOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsOnline: optional.Some(true),
+ WithAvailable: true,
+ })
+ if err != nil {
+ ctx.ServerError("FindRunners", err)
+ return
+ }
+ allRunnerLabels := make(container.Set[string])
+ for _, r := range runners {
+ allRunnerLabels.AddMultiple(r.AgentLabels...)
+ }
+
+ canRun := ctx.Repo.CanWrite(unit.TypeActions)
+
+ workflows = make([]Workflow, 0, len(entries))
+ for _, entry := range entries {
+ workflow := Workflow{Entry: *entry}
+ content, err := actions.GetContentFromEntry(entry)
+ if err != nil {
+ ctx.ServerError("GetContentFromEntry", err)
+ return
+ }
+ wf, err := model.ReadWorkflow(bytes.NewReader(content))
+ if err != nil {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.invalid_workflow_helper", err.Error())
+ workflows = append(workflows, workflow)
+ continue
+ }
+ // The workflow must contain at least one job without "needs". Otherwise, a deadlock will occur and no jobs will be able to run.
+ hasJobWithoutNeeds := false
+ // Check whether there is a matching runner and a job without "needs"
+ emptyJobsNumber := 0
+ for _, j := range wf.Jobs {
+ if j == nil {
+ emptyJobsNumber++
+ continue
+ }
+ if !hasJobWithoutNeeds && len(j.Needs()) == 0 {
+ hasJobWithoutNeeds = true
+ }
+ runsOnList := j.RunsOn()
+ for _, ro := range runsOnList {
+ if strings.Contains(ro, "${{") {
+ // Skip if it contains expressions.
+ // The expressions could be very complex and cannot be evaluated here,
+ // so just skip it; that's OK since it's only a tooltip message.
+ continue
+ }
+ if !allRunnerLabels.Contains(ro) {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_matching_online_runner_helper", ro)
+ break
+ }
+ }
+ if workflow.ErrMsg != "" {
+ break
+ }
+ }
+ if !hasJobWithoutNeeds {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_job_without_needs")
+ }
+ if emptyJobsNumber == len(wf.Jobs) {
+ workflow.ErrMsg = ctx.Locale.TrString("actions.runs.no_job")
+ }
+ workflows = append(workflows, workflow)
+
+ if canRun && workflow.Entry.Name() == curWorkflow {
+ config := wf.WorkflowDispatchConfig()
+ if config != nil {
+ keys := util.KeysOfMap(config.Inputs)
+ slices.Sort(keys)
+ if int64(len(config.Inputs)) > setting.Actions.LimitDispatchInputs {
+ keys = keys[:setting.Actions.LimitDispatchInputs]
+ }
+
+ ctx.Data["CurWorkflowDispatch"] = config
+ ctx.Data["CurWorkflowDispatchInputKeys"] = keys
+ ctx.Data["WarnDispatchInputsLimit"] = int64(len(config.Inputs)) > setting.Actions.LimitDispatchInputs
+ ctx.Data["DispatchInputsLimit"] = setting.Actions.LimitDispatchInputs
+ }
+ }
+ }
+ }
+ ctx.Data["workflows"] = workflows
+ ctx.Data["RepoLink"] = ctx.Repo.Repository.Link()
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ actorID := ctx.FormInt64("actor")
+ status := ctx.FormInt("status")
+
+ actionsConfig := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions).ActionsConfig()
+ ctx.Data["ActionsConfig"] = actionsConfig
+
+ if len(curWorkflow) > 0 && ctx.Repo.IsAdmin() {
+ ctx.Data["AllowDisableOrEnableWorkflow"] = true
+ ctx.Data["CurWorkflowDisabled"] = actionsConfig.IsWorkflowDisabled(curWorkflow)
+ }
+
+ // if the status or actor query param is not present in the frontend href (href="/<repoLink>/actions"),
+ // it defaults to 0, which means all statuses or all actors
+ ctx.Data["CurActor"] = actorID
+ ctx.Data["CurStatus"] = status
+ if actorID > 0 || status > int(actions_model.StatusUnknown) {
+ ctx.Data["IsFiltered"] = true
+ }
+
+ opts := actions_model.FindRunOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ RepoID: ctx.Repo.Repository.ID,
+ WorkflowID: curWorkflow,
+ TriggerUserID: actorID,
+ }
+
+ // if status is not StatusUnknown, the user has selected a status filter
+ if actions_model.Status(status) != actions_model.StatusUnknown {
+ opts.Status = []actions_model.Status{actions_model.Status(status)}
+ }
+
+ runs, total, err := db.FindAndCount[actions_model.ActionRun](ctx, opts)
+ if err != nil {
+ ctx.ServerError("FindAndCount", err)
+ return
+ }
+
+ for _, run := range runs {
+ run.Repo = ctx.Repo.Repository
+ }
+
+ if err := actions_model.RunList(runs).LoadTriggerUser(ctx); err != nil {
+ ctx.ServerError("LoadTriggerUser", err)
+ return
+ }
+
+ ctx.Data["Runs"] = runs
+
+ ctx.Data["Repo"] = ctx.Repo
+
+ actors, err := actions_model.GetActors(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetActors", err)
+ return
+ }
+ ctx.Data["Actors"] = repo.MakeSelfOnTop(ctx.Doer, actors)
+
+ ctx.Data["StatusInfoList"] = actions_model.GetStatusInfoList(ctx)
+
+ pager := context.NewPagination(int(total), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParamString("workflow", curWorkflow)
+ pager.AddParamString("actor", fmt.Sprint(actorID))
+ pager.AddParamString("status", fmt.Sprint(status))
+ ctx.Data["Page"] = pager
+ ctx.Data["HasWorkflowsOrRuns"] = len(workflows) > 0 || len(runs) > 0
+
+ ctx.HTML(http.StatusOK, tplListActions)
+}
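+
+// The list page is driven entirely by query parameters; a sketch of the expected URL shape
+// (parameter names taken from the Form* calls above, the values are hypothetical):
+//
+//	/<owner>/<repo>/actions?workflow=build.yml&actor=42&status=1&page=2&limit=20
+//
+// workflow filters runs by workflow file, actor by the triggering user's ID and status by the
+// numeric actions_model.Status value; 0 (or omitting the parameter) means "no filter".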
diff --git a/routers/web/repo/actions/manual.go b/routers/web/repo/actions/manual.go
new file mode 100644
index 0000000..86a6014
--- /dev/null
+++ b/routers/web/repo/actions/manual.go
@@ -0,0 +1,62 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "net/url"
+
+ actions_service "code.gitea.io/gitea/services/actions"
+ context_module "code.gitea.io/gitea/services/context"
+)
+
+func ManualRunWorkflow(ctx *context_module.Context) {
+ workflowID := ctx.FormString("workflow")
+ if len(workflowID) == 0 {
+ ctx.ServerError("workflow", nil)
+ return
+ }
+
+ ref := ctx.FormString("ref")
+ if len(ref) == 0 {
+ ctx.ServerError("ref", nil)
+ return
+ }
+
+ if empty, err := ctx.Repo.GitRepo.IsEmpty(); err != nil {
+ ctx.ServerError("IsEmpty", err)
+ return
+ } else if empty {
+ ctx.NotFound("IsEmpty", nil)
+ return
+ }
+
+ workflow, err := actions_service.GetWorkflowFromCommit(ctx.Repo.GitRepo, ref, workflowID)
+ if err != nil {
+ ctx.ServerError("GetWorkflowFromCommit", err)
+ return
+ }
+
+ location := ctx.Repo.RepoLink + "/actions?workflow=" + url.QueryEscape(workflowID) +
+ "&actor=" + url.QueryEscape(ctx.FormString("actor")) +
+ "&status=" + url.QueryEscape(ctx.FormString("status"))
+
+ formKeyGetter := func(key string) string {
+ formKey := "inputs[" + key + "]"
+ return ctx.FormString(formKey)
+ }
+
+ if err := workflow.Dispatch(ctx, formKeyGetter, ctx.Repo.Repository, ctx.Doer); err != nil {
+ if actions_service.IsInputRequiredErr(err) {
+ ctx.Flash.Error(ctx.Locale.Tr("actions.workflow.dispatch.input_required", err.(actions_service.InputRequiredErr).Name))
+ ctx.Redirect(location)
+ return
+ }
+ ctx.ServerError("workflow.Dispatch", err)
+ return
+ }
+
+ // redirect back to the actions list filtered by this workflow, where the newly created run appears
+ ctx.Flash.Info(ctx.Locale.Tr("actions.workflow.dispatch.success"))
+ ctx.Redirect(location)
+}
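+
+// A sketch of the form fields this handler reads (field names from the Form* calls above; the
+// workflow file name and the input key are hypothetical):
+//
+//	workflow=deploy.yml
+//	ref=refs/heads/main
+//	inputs[environment]=staging
+//
+// On success the user is redirected back to the actions list filtered by that workflow.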
diff --git a/routers/web/repo/actions/view.go b/routers/web/repo/actions/view.go
new file mode 100644
index 0000000..bc1ecbf
--- /dev/null
+++ b/routers/web/repo/actions/view.go
@@ -0,0 +1,781 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "archive/zip"
+ "compress/gzip"
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/common"
+ actions_service "code.gitea.io/gitea/services/actions"
+ context_module "code.gitea.io/gitea/services/context"
+
+ "xorm.io/builder"
+)
+
+func View(ctx *context_module.Context) {
+ ctx.Data["PageIsActions"] = true
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ job, _ := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+
+ workflowName := job.Run.WorkflowID
+
+ ctx.Data["RunIndex"] = runIndex
+ ctx.Data["JobIndex"] = jobIndex
+ ctx.Data["ActionsURL"] = ctx.Repo.RepoLink + "/actions"
+ ctx.Data["WorkflowName"] = workflowName
+ ctx.Data["WorkflowURL"] = ctx.Repo.RepoLink + "/actions?workflow=" + workflowName
+
+ ctx.HTML(http.StatusOK, tplViewActions)
+}
+
+func ViewLatest(ctx *context_module.Context) {
+ run, err := actions_model.GetLatestRun(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.NotFound("GetLatestRun", err)
+ return
+ }
+ err = run.LoadAttributes(ctx)
+ if err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Redirect(run.HTMLURL(), http.StatusTemporaryRedirect)
+}
+
+func ViewLatestWorkflowRun(ctx *context_module.Context) {
+ branch := ctx.FormString("branch")
+ if branch == "" {
+ branch = ctx.Repo.Repository.DefaultBranch
+ }
+ branch = fmt.Sprintf("refs/heads/%s", branch)
+ event := ctx.FormString("event")
+
+ workflowFile := ctx.Params("workflow_name")
+ run, err := actions_model.GetLatestRunForBranchAndWorkflow(ctx, ctx.Repo.Repository.ID, branch, workflowFile, event)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.NotFound("GetLatestRunForBranchAndWorkflow", err)
+ } else {
+ ctx.ServerError("GetLatestRunForBranchAndWorkflow", err)
+ }
+ return
+ }
+
+ err = run.LoadAttributes(ctx)
+ if err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Redirect(run.HTMLURL(), http.StatusTemporaryRedirect)
+}
+
+type ViewRequest struct {
+ LogCursors []struct {
+ Step int `json:"step"`
+ Cursor int64 `json:"cursor"`
+ Expanded bool `json:"expanded"`
+ } `json:"logCursors"`
+}
+
+type ViewResponse struct {
+ State struct {
+ Run struct {
+ Link string `json:"link"`
+ Title string `json:"title"`
+ Status string `json:"status"`
+ CanCancel bool `json:"canCancel"`
+ CanApprove bool `json:"canApprove"` // the run needs an approval and the doer has permission to approve
+ CanRerun bool `json:"canRerun"`
+ CanDeleteArtifact bool `json:"canDeleteArtifact"`
+ Done bool `json:"done"`
+ Jobs []*ViewJob `json:"jobs"`
+ Commit ViewCommit `json:"commit"`
+ } `json:"run"`
+ CurrentJob struct {
+ Title string `json:"title"`
+ Detail string `json:"detail"`
+ Steps []*ViewJobStep `json:"steps"`
+ } `json:"currentJob"`
+ } `json:"state"`
+ Logs struct {
+ StepsLog []*ViewStepLog `json:"stepsLog"`
+ } `json:"logs"`
+}
+
+type ViewJob struct {
+ ID int64 `json:"id"`
+ Name string `json:"name"`
+ Status string `json:"status"`
+ CanRerun bool `json:"canRerun"`
+ Duration string `json:"duration"`
+}
+
+type ViewCommit struct {
+ LocaleCommit string `json:"localeCommit"`
+ LocalePushedBy string `json:"localePushedBy"`
+ LocaleWorkflow string `json:"localeWorkflow"`
+ ShortSha string `json:"shortSHA"`
+ Link string `json:"link"`
+ Pusher ViewUser `json:"pusher"`
+ Branch ViewBranch `json:"branch"`
+}
+
+type ViewUser struct {
+ DisplayName string `json:"displayName"`
+ Link string `json:"link"`
+}
+
+type ViewBranch struct {
+ Name string `json:"name"`
+ Link string `json:"link"`
+}
+
+type ViewJobStep struct {
+ Summary string `json:"summary"`
+ Duration string `json:"duration"`
+ Status string `json:"status"`
+}
+
+type ViewStepLog struct {
+ Step int `json:"step"`
+ Cursor int64 `json:"cursor"`
+ Lines []*ViewStepLogLine `json:"lines"`
+ Started int64 `json:"started"`
+}
+
+type ViewStepLogLine struct {
+ Index int64 `json:"index"`
+ Message string `json:"message"`
+ Timestamp float64 `json:"timestamp"`
+}
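+
+// A sketch of the JSON exchanged with the frontend while it polls a job (field names follow
+// the struct tags above; the concrete values are hypothetical):
+//
+//	request:  {"logCursors":[{"step":0,"cursor":0,"expanded":true}]}
+//	response: {"state":{"run":{...},"currentJob":{...}},
+//	           "logs":{"stepsLog":[{"step":0,"cursor":2,"lines":[...],"started":1700000000}]}}
+//
+// The cursor returned for a step is the previous cursor plus the number of lines delivered,
+// so the next poll resumes where this one stopped.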
+
+func ViewPost(ctx *context_module.Context) {
+ req := web.GetForm(ctx).(*ViewRequest)
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ current, jobs := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+ run := current.Run
+ if err := run.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ resp := &ViewResponse{}
+
+ resp.State.Run.Title = run.Title
+ resp.State.Run.Link = run.Link()
+ resp.State.Run.CanCancel = !run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanApprove = run.NeedApproval && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanRerun = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.CanDeleteArtifact = run.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions)
+ resp.State.Run.Done = run.Status.IsDone()
+ resp.State.Run.Jobs = make([]*ViewJob, 0, len(jobs)) // marshal to '[]' instead of 'null' in json
+ resp.State.Run.Status = run.Status.String()
+ for _, v := range jobs {
+ resp.State.Run.Jobs = append(resp.State.Run.Jobs, &ViewJob{
+ ID: v.ID,
+ Name: v.Name,
+ Status: v.Status.String(),
+ CanRerun: v.Status.IsDone() && ctx.Repo.CanWrite(unit.TypeActions),
+ Duration: v.Duration().String(),
+ })
+ }
+
+ pusher := ViewUser{
+ DisplayName: run.TriggerUser.GetDisplayName(),
+ Link: run.TriggerUser.HomeLink(),
+ }
+ branch := ViewBranch{
+ Name: run.PrettyRef(),
+ Link: run.RefLink(),
+ }
+ resp.State.Run.Commit = ViewCommit{
+ LocaleCommit: ctx.Locale.TrString("actions.runs.commit"),
+ LocalePushedBy: ctx.Locale.TrString("actions.runs.pushed_by"),
+ LocaleWorkflow: ctx.Locale.TrString("actions.runs.workflow"),
+ ShortSha: base.ShortSha(run.CommitSHA),
+ Link: fmt.Sprintf("%s/commit/%s", run.Repo.Link(), run.CommitSHA),
+ Pusher: pusher,
+ Branch: branch,
+ }
+
+ var task *actions_model.ActionTask
+ if current.TaskID > 0 {
+ var err error
+ task, err = actions_model.GetTaskByID(ctx, current.TaskID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ task.Job = current
+ if err := task.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ resp.State.CurrentJob.Title = current.Name
+ resp.State.CurrentJob.Detail = current.Status.LocaleString(ctx.Locale)
+ if run.NeedApproval {
+ resp.State.CurrentJob.Detail = ctx.Locale.TrString("actions.need_approval_desc")
+ }
+ resp.State.CurrentJob.Steps = make([]*ViewJobStep, 0) // marshal to '[]' instead of 'null' in json
+ resp.Logs.StepsLog = make([]*ViewStepLog, 0) // marshal to '[]' instead of 'null' in json
+ if task != nil {
+ steps := actions.FullSteps(task)
+
+ for _, v := range steps {
+ resp.State.CurrentJob.Steps = append(resp.State.CurrentJob.Steps, &ViewJobStep{
+ Summary: v.Name,
+ Duration: v.Duration().String(),
+ Status: v.Status.String(),
+ })
+ }
+
+ for _, cursor := range req.LogCursors {
+ if !cursor.Expanded {
+ continue
+ }
+
+ step := steps[cursor.Step]
+
+ // if the task log has expired, return a consistent placeholder log line
+ if task.LogExpired {
+ if cursor.Cursor == 0 {
+ resp.Logs.StepsLog = append(resp.Logs.StepsLog, &ViewStepLog{
+ Step: cursor.Step,
+ Cursor: 1,
+ Lines: []*ViewStepLogLine{
+ {
+ Index: 1,
+ Message: ctx.Locale.TrString("actions.runs.expire_log_message"),
+ // Timestamp doesn't mean anything when the log is expired.
+ // Set it to the task's updated time since it's probably the time when the log has expired.
+ Timestamp: float64(task.Updated.AsTime().UnixNano()) / float64(time.Second),
+ },
+ },
+ Started: int64(step.Started),
+ })
+ }
+ continue
+ }
+
+ logLines := make([]*ViewStepLogLine, 0) // marshal to '[]' instead of 'null' in json
+
+ index := step.LogIndex + cursor.Cursor
+ validCursor := cursor.Cursor >= 0 &&
+ // !(cursor.Cursor < step.LogLength) happens when the frontend tries to fetch the next line before it's ready.
+ // In that case return the same cursor and empty lines so the frontend retries.
+ cursor.Cursor < step.LogLength &&
+ // !(index < len(task.LogIndexes)) happens when the task data is older than the step data.
+ // It could be fixed by making sure tasks and steps are written and read in the same transaction,
+ // but it's easier to just treat it as fetching the next line before it's ready.
+ index < int64(len(task.LogIndexes))
+
+ if validCursor {
+ length := step.LogLength - cursor.Cursor
+ offset := task.LogIndexes[index]
+ var err error
+ logRows, err := actions.ReadLogs(ctx, task.LogInStorage, task.LogFilename, offset, length)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ for i, row := range logRows {
+ logLines = append(logLines, &ViewStepLogLine{
+ Index: cursor.Cursor + int64(i) + 1, // start at 1
+ Message: row.Content,
+ Timestamp: float64(row.Time.AsTime().UnixNano()) / float64(time.Second),
+ })
+ }
+ }
+
+ resp.Logs.StepsLog = append(resp.Logs.StepsLog, &ViewStepLog{
+ Step: cursor.Step,
+ Cursor: cursor.Cursor + int64(len(logLines)),
+ Lines: logLines,
+ Started: int64(step.Started),
+ })
+ }
+ }
+
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// Rerun will rerun jobs in the given run.
+// If jobIndexStr is blank, all jobs are rerun.
+func Rerun(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ jobIndexStr := ctx.Params("job")
+ var jobIndex int64
+ if jobIndexStr != "" {
+ jobIndex, _ = strconv.ParseInt(jobIndexStr, 10, 64)
+ }
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ // cannot rerun a job when the workflow is disabled
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+ if cfg.IsWorkflowDisabled(run.WorkflowID) {
+ ctx.JSONError(ctx.Locale.Tr("actions.workflow.disabled"))
+ return
+ }
+
+ // reset run's start and stop time when it is done
+ if run.Status.IsDone() {
+ run.PreviousDuration = run.Duration()
+ run.Started = 0
+ run.Stopped = 0
+ if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "previous_duration"); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ job, jobs := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+
+ if jobIndexStr == "" { // rerun all jobs
+ for _, j := range jobs {
+ // if the job has needs, it should be set to "blocked" status to wait for other jobs
+ shouldBlock := len(j.Needs) > 0
+ if err := rerunJob(ctx, j, shouldBlock); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+ ctx.JSON(http.StatusOK, struct{}{})
+ return
+ }
+
+ rerunJobs := actions_service.GetAllRerunJobs(job, jobs)
+
+ for _, j := range rerunJobs {
+ // jobs other than the specified one should be set to "blocked" status
+ shouldBlock := j.JobID != job.JobID
+ if err := rerunJob(ctx, j, shouldBlock); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func rerunJob(ctx *context_module.Context, job *actions_model.ActionRunJob, shouldBlock bool) error {
+ status := job.Status
+ if !status.IsDone() {
+ return nil
+ }
+
+ job.TaskID = 0
+ job.Status = actions_model.StatusWaiting
+ if shouldBlock {
+ job.Status = actions_model.StatusBlocked
+ }
+ job.Started = 0
+ job.Stopped = 0
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ _, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": status}, "task_id", "status", "started", "stopped")
+ return err
+ }); err != nil {
+ return err
+ }
+
+ actions_service.CreateCommitStatus(ctx, job)
+ return nil
+}
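+
+// The builder.Eq{"status": status} condition above makes the reset conditional: the job is
+// only updated if its status is still the one that was read. A rough sketch of the outcome
+// (statuses are illustrative):
+//
+//	job done (success/failure/cancelled) -> reset to waiting, or blocked when shouldBlock
+//	job still running or waiting         -> IsDone() is false, rerunJob returns nil unchanged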
+
+func Logs(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ jobIndex := ctx.ParamsInt64("job")
+
+ job, _ := getRunJobs(ctx, runIndex, jobIndex)
+ if ctx.Written() {
+ return
+ }
+ if job.TaskID == 0 {
+ ctx.Error(http.StatusNotFound, "job is not started")
+ return
+ }
+
+ err := job.LoadRun(ctx)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ task, err := actions_model.GetTaskByID(ctx, job.TaskID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if task.LogExpired {
+ ctx.Error(http.StatusNotFound, "logs have been cleaned up")
+ return
+ }
+
+ reader, err := actions.OpenLogs(ctx, task.LogInStorage, task.LogFilename)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ defer reader.Close()
+
+ workflowName := job.Run.WorkflowID
+ if p := strings.Index(workflowName, "."); p > 0 {
+ workflowName = workflowName[0:p]
+ }
+ ctx.ServeContent(reader, &context_module.ServeHeaderOptions{
+ Filename: fmt.Sprintf("%v-%v-%v.log", workflowName, job.Name, task.ID),
+ ContentLength: &task.LogSize,
+ ContentType: "text/plain",
+ ContentTypeCharset: "utf-8",
+ Disposition: "attachment",
+ })
+}
+
+func Cancel(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+
+ _, jobs := getRunJobs(ctx, runIndex, -1)
+ if ctx.Written() {
+ return
+ }
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ for _, job := range jobs {
+ status := job.Status
+ if status.IsDone() {
+ continue
+ }
+ if job.TaskID == 0 {
+ job.Status = actions_model.StatusCancelled
+ job.Stopped = timeutil.TimeStampNow()
+ n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
+ if err != nil {
+ return err
+ }
+ if n == 0 {
+ return fmt.Errorf("job has changed, try again")
+ }
+ continue
+ }
+ if err := actions_model.StopTask(ctx, job.TaskID, actions_model.StatusCancelled); err != nil {
+ return err
+ }
+ }
+ return nil
+ }); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ actions_service.CreateCommitStatus(ctx, jobs...)
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func Approve(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+
+ current, jobs := getRunJobs(ctx, runIndex, -1)
+ if ctx.Written() {
+ return
+ }
+ run := current.Run
+ doer := ctx.Doer
+
+ if err := db.WithTx(ctx, func(ctx context.Context) error {
+ run.NeedApproval = false
+ run.ApprovedBy = doer.ID
+ if err := actions_model.UpdateRun(ctx, run, "need_approval", "approved_by"); err != nil {
+ return err
+ }
+ for _, job := range jobs {
+ if len(job.Needs) == 0 && job.Status.IsBlocked() {
+ job.Status = actions_model.StatusWaiting
+ _, err := actions_model.UpdateRunJob(ctx, job, nil, "status")
+ if err != nil {
+ return err
+ }
+ }
+ }
+ return nil
+ }); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ actions_service.CreateCommitStatus(ctx, jobs...)
+
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+// getRunJobs gets the jobs of the run with the given runIndex and returns jobs[jobIndex] together with all jobs.
+// Any error will be written to the ctx.
+// It never returns a nil job or an empty jobs slice; if jobIndex is out of range, it is treated as 0.
+func getRunJobs(ctx *context_module.Context, runIndex, jobIndex int64) (*actions_model.ActionRunJob, []*actions_model.ActionRunJob) {
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return nil, nil
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return nil, nil
+ }
+ run.Repo = ctx.Repo.Repository
+
+ jobs, err := actions_model.GetRunJobsByRunID(ctx, run.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return nil, nil
+ }
+ if len(jobs) == 0 {
+ ctx.Error(http.StatusNotFound)
+ return nil, nil
+ }
+
+ for _, v := range jobs {
+ v.Run = run
+ }
+
+ if jobIndex >= 0 && jobIndex < int64(len(jobs)) {
+ return jobs[jobIndex], jobs
+ }
+ return jobs[0], jobs
+}
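+
+// A minimal usage sketch (the run index is hypothetical):
+//
+//	current, jobs := getRunJobs(ctx, 7, -1) // jobIndex out of range -> current is jobs[0]
+//	if ctx.Written() {                      // any lookup error has already been rendered
+//		return
+//	}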
+
+type ArtifactsViewResponse struct {
+ Artifacts []*ArtifactsViewItem `json:"artifacts"`
+}
+
+type ArtifactsViewItem struct {
+ Name string `json:"name"`
+ Size int64 `json:"size"`
+ Status string `json:"status"`
+}
+
+func ArtifactsView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ artifacts, err := actions_model.ListUploadedArtifactsMeta(ctx, run.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ artifactsResponse := ArtifactsViewResponse{
+ Artifacts: make([]*ArtifactsViewItem, 0, len(artifacts)),
+ }
+ for _, art := range artifacts {
+ status := "completed"
+ if art.Status == actions_model.ArtifactStatusExpired {
+ status = "expired"
+ }
+ artifactsResponse.Artifacts = append(artifactsResponse.Artifacts, &ArtifactsViewItem{
+ Name: art.ArtifactName,
+ Size: art.FileSize,
+ Status: status,
+ })
+ }
+ ctx.JSON(http.StatusOK, artifactsResponse)
+}
+
+func ArtifactsDeleteView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ artifactName := ctx.Params("artifact_name")
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ ctx.NotFoundOrServerError("GetRunByIndex", func(err error) bool {
+ return errors.Is(err, util.ErrNotExist)
+ }, err)
+ return
+ }
+ if err = actions_model.SetArtifactNeedDelete(ctx, run.ID, artifactName); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ ctx.JSON(http.StatusOK, struct{}{})
+}
+
+func ArtifactsDownloadView(ctx *context_module.Context) {
+ runIndex := ctx.ParamsInt64("run")
+ artifactName := ctx.Params("artifact_name")
+
+ run, err := actions_model.GetRunByIndex(ctx, ctx.Repo.Repository.ID, runIndex)
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ artifacts, err := db.Find[actions_model.ActionArtifact](ctx, actions_model.FindArtifactsOptions{
+ RunID: run.ID,
+ ArtifactName: artifactName,
+ })
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if len(artifacts) == 0 {
+ ctx.Error(http.StatusNotFound, "artifact not found")
+ return
+ }
+
+ // if any artifact's status is not upload-confirmed, treat it as not found
+ for _, art := range artifacts {
+ if art.Status != int64(actions_model.ArtifactStatusUploadConfirmed) {
+ ctx.Error(http.StatusNotFound, "artifact not found")
+ return
+ }
+ }
+
+ // Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
+ // The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
+ if len(artifacts) == 1 && artifacts[0].ArtifactName+".zip" == artifacts[0].ArtifactPath && artifacts[0].ContentEncoding == "application/zip" {
+ art := artifacts[0]
+ if setting.Actions.ArtifactStorage.MinioConfig.ServeDirect {
+ u, err := storage.ActionsArtifacts.URL(art.StoragePath, art.ArtifactPath)
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+ f, err := storage.ActionsArtifacts.Open(art.StoragePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ common.ServeContentByReadSeeker(ctx.Base, artifactName, util.ToPointer(art.UpdatedUnix.AsTime()), f)
+ return
+ }
+
+ // Artifacts using the v1-v3 backend are stored as multiple individual files per artifact on the backend
+ // Those need to be zipped for download
+ ctx.Resp.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=%s.zip; filename*=UTF-8''%s.zip", url.PathEscape(artifactName), artifactName))
+ writer := zip.NewWriter(ctx.Resp)
+ defer writer.Close()
+ for _, art := range artifacts {
+ f, err := storage.ActionsArtifacts.Open(art.StoragePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ var r io.ReadCloser
+ if art.ContentEncoding == "gzip" {
+ r, err = gzip.NewReader(f)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ } else {
+ r = f
+ }
+ defer r.Close()
+
+ w, err := writer.Create(art.ArtifactPath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ if _, err := io.Copy(w, r); err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+}
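+
+// A summary of the two download paths implemented above (derived from the conditions in the
+// code):
+//
+//	v4 backend : one pre-zipped file per artifact -> served (or redirected) as-is
+//	v1-v3      : several files per artifact, possibly gzip-encoded -> re-zipped on the fly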
+
+func DisableWorkflowFile(ctx *context_module.Context) {
+ disableOrEnableWorkflowFile(ctx, false)
+}
+
+func EnableWorkflowFile(ctx *context_module.Context) {
+ disableOrEnableWorkflowFile(ctx, true)
+}
+
+func disableOrEnableWorkflowFile(ctx *context_module.Context, isEnable bool) {
+ workflow := ctx.FormString("workflow")
+ if len(workflow) == 0 {
+ ctx.ServerError("workflow", nil)
+ return
+ }
+
+ cfgUnit := ctx.Repo.Repository.MustGetUnit(ctx, unit.TypeActions)
+ cfg := cfgUnit.ActionsConfig()
+
+ if isEnable {
+ cfg.EnableWorkflow(workflow)
+ } else {
+ cfg.DisableWorkflow(workflow)
+ }
+
+ if err := repo_model.UpdateRepoUnit(ctx, cfgUnit); err != nil {
+ ctx.ServerError("UpdateRepoUnit", err)
+ return
+ }
+
+ if isEnable {
+ ctx.Flash.Success(ctx.Tr("actions.workflow.enable_success", workflow))
+ } else {
+ ctx.Flash.Success(ctx.Tr("actions.workflow.disable_success", workflow))
+ }
+
+ redirectURL := fmt.Sprintf("%s/actions?workflow=%s&actor=%s&status=%s", ctx.Repo.RepoLink, url.QueryEscape(workflow),
+ url.QueryEscape(ctx.FormString("actor")), url.QueryEscape(ctx.FormString("status")))
+ ctx.JSONRedirect(redirectURL)
+}
diff --git a/routers/web/repo/activity.go b/routers/web/repo/activity.go
new file mode 100644
index 0000000..ba776c8
--- /dev/null
+++ b/routers/web/repo/activity.go
@@ -0,0 +1,105 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplActivity base.TplName = "repo/activity"
+)
+
+// Activity renders the page showing the repository's latest changes
+func Activity(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity")
+ ctx.Data["PageIsActivity"] = true
+
+ ctx.Data["PageIsPulse"] = true
+
+ ctx.Data["Period"] = ctx.Params("period")
+
+ timeUntil := time.Now()
+ var timeFrom time.Time
+
+ switch ctx.Data["Period"] {
+ case "daily":
+ timeFrom = timeUntil.Add(-time.Hour * 24)
+ case "halfweekly":
+ timeFrom = timeUntil.Add(-time.Hour * 72)
+ case "weekly":
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ case "monthly":
+ timeFrom = timeUntil.AddDate(0, -1, 0)
+ case "quarterly":
+ timeFrom = timeUntil.AddDate(0, -3, 0)
+ case "semiyearly":
+ timeFrom = timeUntil.AddDate(0, -6, 0)
+ case "yearly":
+ timeFrom = timeUntil.AddDate(-1, 0, 0)
+ default:
+ ctx.Data["Period"] = "weekly"
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ }
+ ctx.Data["DateFrom"] = timeFrom.UTC().Format(time.RFC3339)
+ ctx.Data["DateUntil"] = timeUntil.UTC().Format(time.RFC3339)
+ ctx.Data["PeriodText"] = ctx.Tr("repo.activity.period." + ctx.Data["Period"].(string))
+
+ var err error
+ if ctx.Data["Activity"], err = activities_model.GetActivityStats(ctx, ctx.Repo.Repository, timeFrom,
+ ctx.Repo.CanRead(unit.TypeReleases),
+ ctx.Repo.CanRead(unit.TypeIssues),
+ ctx.Repo.CanRead(unit.TypePullRequests),
+ ctx.Repo.CanRead(unit.TypeCode) && !ctx.Repo.Repository.IsEmpty); err != nil {
+ ctx.ServerError("GetActivityStats", err)
+ return
+ }
+
+ if ctx.PageData["repoActivityTopAuthors"], err = activities_model.GetActivityStatsTopAuthors(ctx, ctx.Repo.Repository, timeFrom, 10); err != nil {
+ ctx.ServerError("GetActivityStatsTopAuthors", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplActivity)
+}
+
+// ActivityAuthors renders JSON with the top commit authors for the given time period over all branches
+func ActivityAuthors(ctx *context.Context) {
+ timeUntil := time.Now()
+ var timeFrom time.Time
+
+ switch ctx.Params("period") {
+ case "daily":
+ timeFrom = timeUntil.Add(-time.Hour * 24)
+ case "halfweekly":
+ timeFrom = timeUntil.Add(-time.Hour * 72)
+ case "weekly":
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ case "monthly":
+ timeFrom = timeUntil.AddDate(0, -1, 0)
+ case "quarterly":
+ timeFrom = timeUntil.AddDate(0, -3, 0)
+ case "semiyearly":
+ timeFrom = timeUntil.AddDate(0, -6, 0)
+ case "yearly":
+ timeFrom = timeUntil.AddDate(-1, 0, 0)
+ default:
+ timeFrom = timeUntil.Add(-time.Hour * 168)
+ }
+
+ authors, err := activities_model.GetActivityStatsTopAuthors(ctx, ctx.Repo.Repository, timeFrom, 10)
+ if err != nil {
+ ctx.ServerError("GetActivityStatsTopAuthors", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, authors)
+}
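+
+// Both handlers above accept the same period strings via the ":period" route parameter and
+// fall back to "weekly" for anything else. A sketch of the mapping implemented by the
+// switches:
+//
+//	daily -> 24h, halfweekly -> 72h, weekly -> 168h,
+//	monthly -> 1 month, quarterly -> 3 months, semiyearly -> 6 months, yearly -> 1 year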
diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go
new file mode 100644
index 0000000..b42effd
--- /dev/null
+++ b/routers/web/repo/attachment.go
@@ -0,0 +1,163 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "net/http"
+
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/attachment"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// UploadIssueAttachment responds to requests to upload issue/PR attachments
+func UploadIssueAttachment(ctx *context.Context) {
+ uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Attachment.AllowedTypes)
+}
+
+// UploadReleaseAttachment responds to requests to upload release attachments
+func UploadReleaseAttachment(ctx *context.Context) {
+ uploadAttachment(ctx, ctx.Repo.Repository.ID, setting.Repository.Release.AllowedTypes)
+}
+
+// uploadAttachment handles uploading an attachment of one of the allowed types to the given repository
+func uploadAttachment(ctx *context.Context, repoID int64, allowedTypes string) {
+ if !setting.Attachment.Enabled {
+ ctx.Error(http.StatusNotFound, "attachment is not enabled")
+ return
+ }
+
+ file, header, err := ctx.Req.FormFile("file")
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err))
+ return
+ }
+ defer file.Close()
+
+ attach, err := attachment.UploadAttachment(ctx, file, allowedTypes, header.Size, &repo_model.Attachment{
+ Name: header.Filename,
+ UploaderID: ctx.Doer.ID,
+ RepoID: repoID,
+ })
+ if err != nil {
+ if upload.IsErrFileTypeForbidden(err) {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("NewAttachment: %v", err))
+ return
+ }
+
+ log.Trace("New attachment uploaded: %s", attach.UUID)
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": attach.UUID,
+ })
+}
+
+// DeleteAttachment responds to requests to delete an issue's attachment
+func DeleteAttachment(ctx *context.Context) {
+ file := ctx.FormString("file")
+ attach, err := repo_model.GetAttachmentByUUID(ctx, file)
+ if err != nil {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+ if !ctx.IsSigned || (ctx.Doer.ID != attach.UploaderID) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ err = repo_model.DeleteAttachment(ctx, attach, true)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("DeleteAttachment: %v", err))
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": attach.UUID,
+ })
+}
+
+// ServeAttachment serves the attachment with the given UUID
+func ServeAttachment(ctx *context.Context, uuid string) {
+ attach, err := repo_model.GetAttachmentByUUID(ctx, uuid)
+ if err != nil {
+ if repo_model.IsErrAttachmentNotExist(err) {
+ ctx.Error(http.StatusNotFound)
+ } else {
+ ctx.ServerError("GetAttachmentByUUID", err)
+ }
+ return
+ }
+
+ repository, unitType, err := repo_service.LinkedRepository(ctx, attach)
+ if err != nil {
+ ctx.ServerError("LinkedRepository", err)
+ return
+ }
+
+ if repository == nil { // If not linked
+ if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ } else { // If we have the repository we check access
+ perm, err := access_model.GetUserRepoPermission(ctx, repository, ctx.Doer)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetUserRepoPermission", err.Error())
+ return
+ }
+ if !perm.CanRead(unitType) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ if attach.ExternalURL != "" {
+ ctx.Redirect(attach.ExternalURL)
+ return
+ }
+
+ if err := attach.IncreaseDownloadCount(ctx); err != nil {
+ ctx.ServerError("IncreaseDownloadCount", err)
+ return
+ }
+
+ if setting.Attachment.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.Attachments.URL(attach.RelativePath(), attach.Name)
+
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+
+ if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+attach.UUID+`"`) {
+ return
+ }
+
+ // We have a match and access to the release or issue, so serve the stored file.
+ fr, err := storage.Attachments.Open(attach.RelativePath())
+ if err != nil {
+ ctx.ServerError("Open", err)
+ return
+ }
+ defer fr.Close()
+
+ common.ServeContentByReadSeeker(ctx.Base, attach.Name, util.ToPointer(attach.CreatedUnix.AsTime()), fr)
+}
+
+// GetAttachment serves the attachment identified by the :uuid URL parameter
+func GetAttachment(ctx *context.Context) {
+ ServeAttachment(ctx, ctx.Params(":uuid"))
+}
diff --git a/routers/web/repo/badges/badges.go b/routers/web/repo/badges/badges.go
new file mode 100644
index 0000000..a2306d5
--- /dev/null
+++ b/routers/web/repo/badges/badges.go
@@ -0,0 +1,164 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package badges
+
+import (
+ "fmt"
+ "net/url"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/setting"
+ context_module "code.gitea.io/gitea/services/context"
+)
+
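+// getBadgeURL renders the configured badge generator template for the given
+// label, text and color, and forwards any remaining query parameters to the
+// generator. Dashes are doubled because shields.io-style generators commonly
+// treat "--" as an escaped literal dash.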
+func getBadgeURL(ctx *context_module.Context, label, text, color string) string {
+ sb := &strings.Builder{}
+ _ = setting.Badges.GeneratorURLTemplateTemplate.Execute(sb, map[string]string{
+ "label": url.PathEscape(strings.ReplaceAll(label, "-", "--")),
+ "text": url.PathEscape(strings.ReplaceAll(text, "-", "--")),
+ "color": url.PathEscape(color),
+ })
+
+ badgeURL := sb.String()
+ q := ctx.Req.URL.Query()
+ // Remove any `branch` or `event` query parameters. They're used by the
+ // workflow badge route, and do not need forwarding to the badge generator.
+ delete(q, "branch")
+ delete(q, "event")
+ if len(q) > 0 {
+ return fmt.Sprintf("%s?%s", badgeURL, q.Encode())
+ }
+ return badgeURL
+}
+
+func redirectToBadge(ctx *context_module.Context, label, text, color string) {
+ ctx.Redirect(getBadgeURL(ctx, label, text, color))
+}
+
+func errorBadge(ctx *context_module.Context, label, text string) { //nolint:unparam
+ ctx.Redirect(getBadgeURL(ctx, label, text, "crimson"))
+}
+
+func GetWorkflowBadge(ctx *context_module.Context) {
+ branch := ctx.Req.URL.Query().Get("branch")
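+ // The badge route takes a short branch name; qualify it as a full ref
+ // before looking up the latest run.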
+ if branch != "" {
+ branch = fmt.Sprintf("refs/heads/%s", branch)
+ }
+ event := ctx.Req.URL.Query().Get("event")
+
+ workflowFile := ctx.Params("workflow_name")
+ run, err := actions_model.GetLatestRunForBranchAndWorkflow(ctx, ctx.Repo.Repository.ID, branch, workflowFile, event)
+ if err != nil {
+ errorBadge(ctx, workflowFile, "Not found")
+ return
+ }
+
+ var color string
+ switch run.Status {
+ case actions_model.StatusUnknown:
+ color = "lightgrey"
+ case actions_model.StatusWaiting:
+ color = "lightgrey"
+ case actions_model.StatusRunning:
+ color = "gold"
+ case actions_model.StatusSuccess:
+ color = "brightgreen"
+ case actions_model.StatusFailure:
+ color = "crimson"
+ case actions_model.StatusCancelled:
+ color = "orange"
+ case actions_model.StatusSkipped:
+ color = "blue"
+ case actions_model.StatusBlocked:
+ color = "yellow"
+ default:
+ color = "lightgrey"
+ }
+
+ redirectToBadge(ctx, workflowFile, run.Status.String(), color)
+}
+
+func getIssueOrPullBadge(ctx *context_module.Context, label, variant string, num int) {
+ var text string
+ if len(variant) > 0 {
+ text = fmt.Sprintf("%d %s", num, variant)
+ } else {
+ text = fmt.Sprintf("%d", num)
+ }
+ redirectToBadge(ctx, label, text, "blue")
+}
+
+func getIssueBadge(ctx *context_module.Context, variant string, num int) {
+ if !ctx.Repo.CanRead(unit.TypeIssues) &&
+ !ctx.Repo.CanRead(unit.TypeExternalTracker) {
+ errorBadge(ctx, "issues", "Not found")
+ return
+ }
+
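+ // Issue counts only exist for the built-in tracker; repositories configured
+ // with an external tracker get a "Not found" badge instead.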
+ _, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil {
+ errorBadge(ctx, "issues", "Not found")
+ return
+ }
+
+ getIssueOrPullBadge(ctx, "issues", variant, num)
+}
+
+func getPullBadge(ctx *context_module.Context, variant string, num int) {
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ errorBadge(ctx, "pulls", "Not found")
+ return
+ }
+
+ getIssueOrPullBadge(ctx, "pulls", variant, num)
+}
+
+func GetOpenIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "open", ctx.Repo.Repository.NumOpenIssues)
+}
+
+func GetClosedIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "closed", ctx.Repo.Repository.NumClosedIssues)
+}
+
+func GetTotalIssuesBadge(ctx *context_module.Context) {
+ getIssueBadge(ctx, "", ctx.Repo.Repository.NumIssues)
+}
+
+func GetOpenPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "open", ctx.Repo.Repository.NumOpenPulls)
+}
+
+func GetClosedPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "closed", ctx.Repo.Repository.NumClosedPulls)
+}
+
+func GetTotalPullsBadge(ctx *context_module.Context) {
+ getPullBadge(ctx, "", ctx.Repo.Repository.NumPulls)
+}
+
+func GetStarsBadge(ctx *context_module.Context) {
+ redirectToBadge(ctx, "stars", fmt.Sprintf("%d", ctx.Repo.Repository.NumStars), "blue")
+}
+
+func GetLatestReleaseBadge(ctx *context_module.Context) {
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ errorBadge(ctx, "release", "Not found")
+ return
+ }
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ return
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ redirectToBadge(ctx, "release", release.TagName, "blue")
+}
diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go
new file mode 100644
index 0000000..eea3d4d
--- /dev/null
+++ b/routers/web/repo/blame.go
@@ -0,0 +1,298 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ gotemplate "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/highlight"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ files_service "code.gitea.io/gitea/services/repository/files"
+)
+
+type blameRow struct {
+ RowNumber int
+ Avatar gotemplate.HTML
+ RepoLink string
+ PartSha string
+ PreviousSha string
+ PreviousShaURL string
+ IsFirstCommit bool
+ CommitURL string
+ CommitMessage string
+ CommitSince gotemplate.HTML
+ Code gotemplate.HTML
+ EscapeStatus *charset.EscapeStatus
+}
+
+// RefBlame renders the blame page
+func RefBlame(ctx *context.Context) {
+ if ctx.Repo.TreePath == "" {
+ ctx.NotFound("No file specified", nil)
+ return
+ }
+
+ paths := make([]string, 0, 5)
+ treeNames := strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ // Get the tree entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+ blob := entry.Blob()
+
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["IsBlame"] = true
+
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ ctx.Data["Paths"] = paths
+ ctx.Data["TreeNames"] = treeNames
+
+ ctx.Data["FileSize"] = blob.Size()
+ ctx.Data["FileName"] = blob.Name()
+
+ // Do not display a blame view if the size of the file is
+ // larger than what is configured as the maximum.
+ if blob.Size() >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ ctx.HTML(http.StatusOK, tplRepoHome)
+ return
+ }
+
+ ctx.Data["NumLinesSet"] = true
+ ctx.Data["NumLines"], err = blob.GetBlobLineCount()
+ if err != nil {
+ ctx.ServerError("GetBlobLineCount", err)
+ return
+ }
+
+ result, err := performBlame(ctx, ctx.Repo.Commit, ctx.Repo.TreePath, ctx.FormBool("bypass-blame-ignore"))
+ if err != nil {
+ ctx.ServerError("performBlame", err)
+ return
+ }
+
+ ctx.Data["UsesIgnoreRevs"] = result.UsesIgnoreRevs
+ ctx.Data["FaultyIgnoreRevsFile"] = result.FaultyIgnoreRevsFile
+
+ commitNames := processBlameParts(ctx, result.Parts)
+ if ctx.Written() {
+ return
+ }
+
+ renderBlame(ctx, result.Parts, commitNames)
+
+ ctx.HTML(http.StatusOK, tplRepoHome)
+}
+
+type blameResult struct {
+ Parts []*git.BlamePart
+ UsesIgnoreRevs bool
+ FaultyIgnoreRevsFile bool
+}
+
+func performBlame(ctx *context.Context, commit *git.Commit, file string, bypassBlameIgnore bool) (*blameResult, error) {
+ repoPath := ctx.Repo.Repository.RepoPath()
+ objectFormat := ctx.Repo.GetObjectFormat()
+
+ blameReader, err := git.CreateBlameReader(ctx, objectFormat, repoPath, commit, file, bypassBlameIgnore)
+ if err != nil {
+ return nil, err
+ }
+
+ r := &blameResult{}
+ if err := fillBlameResult(blameReader, r); err != nil {
+ _ = blameReader.Close()
+ return nil, err
+ }
+
+ err = blameReader.Close()
+ if err != nil {
+ if len(r.Parts) == 0 && r.UsesIgnoreRevs {
+ // try again without ignored revs
+
+ blameReader, err = git.CreateBlameReader(ctx, objectFormat, repoPath, commit, file, true)
+ if err != nil {
+ return nil, err
+ }
+
+ r := &blameResult{
+ FaultyIgnoreRevsFile: true,
+ }
+ if err := fillBlameResult(blameReader, r); err != nil {
+ _ = blameReader.Close()
+ return nil, err
+ }
+
+ return r, blameReader.Close()
+ }
+ return nil, err
+ }
+ return r, nil
+}
+
+func fillBlameResult(br *git.BlameReader, r *blameResult) error {
+ r.UsesIgnoreRevs = br.UsesIgnoreRevs()
+
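+ // previousHelper records the first blame part seen for each commit, so that
+ // later parts of the same commit that carry no previous-commit information
+ // can inherit it.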
+ previousHelper := make(map[string]*git.BlamePart)
+
+ r.Parts = make([]*git.BlamePart, 0, 5)
+ for {
+ blamePart, err := br.NextPart()
+ if err != nil {
+ return fmt.Errorf("BlameReader.NextPart failed: %w", err)
+ }
+ if blamePart == nil {
+ break
+ }
+
+ if prev, ok := previousHelper[blamePart.Sha]; ok {
+ if blamePart.PreviousSha == "" {
+ blamePart.PreviousSha = prev.PreviousSha
+ blamePart.PreviousPath = prev.PreviousPath
+ }
+ } else {
+ previousHelper[blamePart.Sha] = blamePart
+ }
+
+ r.Parts = append(r.Parts, blamePart)
+ }
+
+ return nil
+}
+
+func processBlameParts(ctx *context.Context, blameParts []*git.BlamePart) map[string]*user_model.UserCommit {
+ // store commit data by SHA to look up avatar info etc
+ commitNames := make(map[string]*user_model.UserCommit)
+ // and as blameParts can reference the same commits multiple
+ // times, we cache the lookup work locally
+ commits := make([]*git.Commit, 0, len(blameParts))
+ commitCache := map[string]*git.Commit{}
+ commitCache[ctx.Repo.Commit.ID.String()] = ctx.Repo.Commit
+
+ for _, part := range blameParts {
+ sha := part.Sha
+ if _, ok := commitNames[sha]; ok {
+ continue
+ }
+
+ // find the blamePart commit, to look up parent & email address for avatars
+ commit, ok := commitCache[sha]
+ var err error
+ if !ok {
+ commit, err = ctx.Repo.GitRepo.GetCommit(sha)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Repo.GitRepo.GetCommit", err)
+ } else {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ }
+ return nil
+ }
+ commitCache[sha] = commit
+ }
+
+ commits = append(commits, commit)
+ }
+
+ // populate commit email addresses to later look up avatars.
+ for _, c := range user_model.ValidateCommitsWithEmails(ctx, commits) {
+ commitNames[c.ID.String()] = c
+ }
+
+ return commitNames
+}
+
+func renderBlame(ctx *context.Context, blameParts []*git.BlamePart, commitNames map[string]*user_model.UserCommit) {
+ repoLink := ctx.Repo.RepoLink
+
+ language, err := files_service.TryGetContentLanguage(ctx.Repo.GitRepo, ctx.Repo.CommitID, ctx.Repo.TreePath)
+ if err != nil {
+ log.Error("Unable to get file language for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
+ }
+
+ lines := make([]string, 0)
+ rows := make([]*blameRow, 0)
+ escapeStatus := &charset.EscapeStatus{}
+
+ var lexerName string
+
+ avatarUtils := templates.NewAvatarUtils(ctx)
+ i := 0
+ commitCnt := 0
+ for _, part := range blameParts {
+ for index, line := range part.Lines {
+ i++
+ lines = append(lines, line)
+
+ br := &blameRow{
+ RowNumber: i,
+ }
+
+ commit := commitNames[part.Sha]
+ if index == 0 {
+ // Count commit number
+ commitCnt++
+
+ // User avatar image
+ commitSince := timeutil.TimeSinceUnix(timeutil.TimeStamp(commit.Author.When.Unix()), ctx.Locale)
+
+ var avatar string
+ if commit.User != nil {
+ avatar = string(avatarUtils.Avatar(commit.User, 18))
+ } else {
+ avatar = string(avatarUtils.AvatarByEmail(commit.Author.Email, commit.Author.Name, 18, "tw-mr-2"))
+ }
+
+ br.Avatar = gotemplate.HTML(avatar)
+ br.RepoLink = repoLink
+ br.PartSha = part.Sha
+ br.PreviousSha = part.PreviousSha
+ br.PreviousShaURL = fmt.Sprintf("%s/blame/commit/%s/%s", repoLink, url.PathEscape(part.PreviousSha), util.PathEscapeSegments(part.PreviousPath))
+ br.CommitURL = fmt.Sprintf("%s/commit/%s", repoLink, url.PathEscape(part.Sha))
+ br.CommitMessage = commit.CommitMessage
+ br.CommitSince = commitSince
+ }
+
+ if i != len(lines)-1 {
+ line += "\n"
+ }
+ fileName := fmt.Sprintf("%v", ctx.Data["FileName"])
+ line, lexerNameForLine := highlight.Code(fileName, language, line)
+
+ // Set the lexer name to the first detected lexer. This is certainly suboptimal;
+ // we should instead highlight the whole file at once.
+ if lexerName == "" {
+ lexerName = lexerNameForLine
+ }
+
+ br.EscapeStatus, br.Code = charset.EscapeControlHTML(line, ctx.Locale, charset.FileviewContext)
+ rows = append(rows, br)
+ escapeStatus = escapeStatus.Or(br.EscapeStatus)
+ }
+ }
+
+ ctx.Data["EscapeStatus"] = escapeStatus
+ ctx.Data["BlameRows"] = rows
+ ctx.Data["CommitCnt"] = commitCnt
+ ctx.Data["LexerName"] = lexerName
+}
diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go
new file mode 100644
index 0000000..4897a5f
--- /dev/null
+++ b/routers/web/repo/branch.go
@@ -0,0 +1,262 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ release_service "code.gitea.io/gitea/services/release"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplBranch base.TplName = "repo/branch/list"
+)
+
+// Branches renders the repository branches page
+func Branches(ctx *context.Context) {
+ ctx.Data["Title"] = "Branches"
+ ctx.Data["IsRepoToolbarBranches"] = true
+ ctx.Data["AllowsPulls"] = ctx.Repo.Repository.AllowsPulls(ctx)
+ ctx.Data["IsWriter"] = ctx.Repo.CanWrite(unit.TypeCode)
+ ctx.Data["IsMirror"] = ctx.Repo.Repository.IsMirror
+ ctx.Data["CanPull"] = ctx.Repo.CanWrite(unit.TypeCode) ||
+ (ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID))
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["PageIsBranches"] = true
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ pageSize := setting.Git.BranchesRangeSize
+
+ kw := ctx.FormString("q")
+
+ defaultBranch, branches, branchesCount, err := repo_service.LoadBranches(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, optional.None[bool](), kw, page, pageSize)
+ if err != nil {
+ ctx.ServerError("LoadBranches", err)
+ return
+ }
+
+ commitIDs := []string{defaultBranch.DBBranch.CommitID}
+ for _, branch := range branches {
+ commitIDs = append(commitIDs, branch.DBBranch.CommitID)
+ }
+
+ commitStatuses, err := git_model.GetLatestCommitStatusForRepoCommitIDs(ctx, ctx.Repo.Repository.ID, commitIDs)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatusForRepoCommitIDs", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+
+ commitStatus := make(map[string]*git_model.CommitStatus)
+ for commitID, cs := range commitStatuses {
+ commitStatus[commitID] = git_model.CalcCommitStatus(cs)
+ }
+
+ ctx.Data["Keyword"] = kw
+ ctx.Data["Branches"] = branches
+ ctx.Data["CommitStatus"] = commitStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+ ctx.Data["DefaultBranchBranch"] = defaultBranch
+ pager := context.NewPagination(int(branchesCount), pageSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplBranch)
+}
+
+// DeleteBranchPost responds to a request to delete a branch
+func DeleteBranchPost(ctx *context.Context) {
+ defer redirect(ctx)
+ branchName := ctx.FormString("name")
+
+ if err := repo_service.DeleteBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, branchName); err != nil {
+ switch {
+ case git.IsErrBranchNotExist(err):
+ log.Debug("DeleteBranch: Can't delete non existing branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName))
+ case errors.Is(err, repo_service.ErrBranchIsDefault):
+ log.Debug("DeleteBranch: Can't delete default branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.default_deletion_failed", branchName))
+ case errors.Is(err, git_model.ErrBranchIsProtected):
+ log.Debug("DeleteBranch: Can't delete protected branch '%s'", branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.protected_deletion_failed", branchName))
+ default:
+ log.Error("DeleteBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", branchName))
+ }
+
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", branchName))
+}
+
+// RestoreBranchPost responds to a request to restore a previously deleted branch
+func RestoreBranchPost(ctx *context.Context) {
+ defer redirect(ctx)
+
+ branchID := ctx.FormInt64("branch_id")
+ branchName := ctx.FormString("name")
+
+ deletedBranch, err := git_model.GetDeletedBranchByID(ctx, ctx.Repo.Repository.ID, branchID)
+ if err != nil {
+ log.Error("GetDeletedBranchByID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", branchName))
+ return
+ } else if deletedBranch == nil {
+ log.Debug("RestoreBranch: Can't restore branch[%d] '%s', as it does not exist", branchID, branchName)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", branchName))
+ return
+ }
+
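+ // Re-create the ref by pushing the recorded commit back into the repository
+ // itself under its old branch name.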
+ if err := git.Push(ctx, ctx.Repo.Repository.RepoPath(), git.PushOptions{
+ Remote: ctx.Repo.Repository.RepoPath(),
+ Branch: fmt.Sprintf("%s:%s%s", deletedBranch.CommitID, git.BranchPrefix, deletedBranch.Name),
+ Env: repo_module.PushingEnvironment(ctx.Doer, ctx.Repo.Repository),
+ }); err != nil {
+ if strings.Contains(err.Error(), "already exists") {
+ log.Debug("RestoreBranch: Can't restore branch '%s', since one with the same name already exists", deletedBranch.Name)
+ ctx.Flash.Error(ctx.Tr("repo.branch.already_exists", deletedBranch.Name))
+ return
+ }
+ log.Error("RestoreBranch: CreateBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.restore_failed", deletedBranch.Name))
+ return
+ }
+
+ objectFormat := git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName)
+
+ // Don't return an error below this point
+ if err := repo_service.PushUpdate(
+ &repo_module.PushUpdateOptions{
+ RefFullName: git.RefNameFromBranch(deletedBranch.Name),
+ OldCommitID: objectFormat.EmptyObjectID().String(),
+ NewCommitID: deletedBranch.CommitID,
+ PusherID: ctx.Doer.ID,
+ PusherName: ctx.Doer.Name,
+ RepoUserName: ctx.Repo.Owner.Name,
+ RepoName: ctx.Repo.Repository.Name,
+ }); err != nil {
+ log.Error("RestoreBranch: Update: %v", err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.restore_success", deletedBranch.Name))
+}
+
+func redirect(ctx *context.Context) {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/branches?page=" + url.QueryEscape(ctx.FormString("page")))
+}
+
+// CreateBranch creates a new branch in the repository
+func CreateBranch(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewBranchForm)
+ if !ctx.Repo.CanCreateBranch() {
+ ctx.NotFound("CreateBranch", nil)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.GetErrMsg())
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ var err error
+
+ if form.CreateTag {
+ target := ctx.Repo.CommitID
+ if ctx.Repo.IsViewBranch {
+ target = ctx.Repo.BranchName
+ }
+ err = release_service.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, target, form.NewBranchName, "")
+ } else if ctx.Repo.IsViewBranch {
+ err = repo_service.CreateNewBranch(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.Repo.BranchName, form.NewBranchName)
+ } else {
+ err = repo_service.CreateNewBranchFromCommit(ctx, ctx.Doer, ctx.Repo.Repository, ctx.Repo.GitRepo, ctx.Repo.CommitID, form.NewBranchName)
+ }
+ if err != nil {
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrTagAlreadyExists(err) {
+ e := err.(models.ErrTagAlreadyExists)
+ ctx.Flash.Error(ctx.Tr("repo.branch.tag_collision", e.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git_model.IsErrBranchAlreadyExists(err) || git.IsErrPushOutOfDate(err) {
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_already_exists", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git_model.IsErrBranchNameConflict(err) {
+ e := err.(git_model.ErrBranchNameConflict)
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_name_conflict", form.NewBranchName, e.BranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+ if git.IsErrPushRejected(err) {
+ e := err.(*git.ErrPushRejected)
+ if len(e.Message) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.editor.push_rejected_no_message"))
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(e.Message),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ ctx.ServerError("CreateNewBranch", err)
+ return
+ }
+
+ if form.CreateTag {
+ ctx.Flash.Success(ctx.Tr("repo.tag.create_success", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/tag/" + util.PathEscapeSegments(form.NewBranchName))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.create_success", form.NewBranchName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(form.NewBranchName) + "/" + util.PathEscapeSegments(form.CurrentPath))
+}
diff --git a/routers/web/repo/cherry_pick.go b/routers/web/repo/cherry_pick.go
new file mode 100644
index 0000000..90dae70
--- /dev/null
+++ b/routers/web/repo/cherry_pick.go
@@ -0,0 +1,192 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "errors"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/repository/files"
+)
+
+var tplCherryPick base.TplName = "repo/editor/cherry_pick"
+
+// CherryPick handles cherry-pick GET requests
+func CherryPick(ctx *context.Context) {
+ ctx.Data["SHA"] = ctx.Params(":sha")
+ cherryPickCommit, err := ctx.Repo.GitRepo.GetCommit(ctx.Params(":sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Missing Commit", err)
+ return
+ }
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+
+ if ctx.FormString("cherry-pick-type") == "revert" {
+ ctx.Data["CherryPickType"] = "revert"
+ ctx.Data["commit_summary"] = "revert " + ctx.Params(":sha")
+ ctx.Data["commit_message"] = "revert " + cherryPickCommit.Message()
+ } else {
+ ctx.Data["CherryPickType"] = "cherry-pick"
+ splits := strings.SplitN(cherryPickCommit.Message(), "\n", 2)
+ ctx.Data["commit_summary"] = splits[0]
+ ctx.Data["commit_message"] = splits[1]
+ }
+
+ canCommit := renderCommitRights(ctx)
+ ctx.Data["TreePath"] = ""
+
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ ctx.HTML(200, tplCherryPick)
+}
+
+// CherryPickPost handles cherry-pick POST requests
+func CherryPickPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CherryPickForm)
+
+ sha := ctx.Params(":sha")
+ ctx.Data["SHA"] = sha
+ if form.Revert {
+ ctx.Data["CherryPickType"] = "revert"
+ } else {
+ ctx.Data["CherryPickType"] = "cherry-pick"
+ }
+
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ if ctx.HasError() {
+ ctx.HTML(200, tplCherryPick)
+ return
+ }
+
+ // Cannot commit to an existing branch if the user doesn't have the rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplCherryPick, &form)
+ return
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if message == "" {
+ if form.Revert {
+ message = ctx.Locale.TrString("repo.commit.revert-header", sha)
+ } else {
+ message = ctx.Locale.TrString("repo.commit.cherry-pick-header", sha)
+ }
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplCherryPick, &form)
+ if ctx.Written() {
+ return
+ }
+
+ opts := &files.ApplyDiffPatchOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }
+
+ // First let's try the simple plain read-tree -m approach
+ opts.Content = sha
+ if _, err := files.CherryPick(ctx, ctx.Repo.Repository, ctx.Doer, form.Revert, opts); err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplCherryPick, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ // Drop through to the apply technique
+
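+ // Build the commit's patch (reversed for a revert) and apply it as a diff
+ // against the target branch instead.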
+ buf := &bytes.Buffer{}
+ if form.Revert {
+ if err := git.GetReverseRawDiff(ctx, ctx.Repo.Repository.RepoPath(), sha, buf); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff", errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+ } else {
+ if err := git.GetRawDiff(ctx.Repo.GitRepo, sha, git.RawDiffType("patch"), buf); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff", errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+ }
+
+ opts.Content = buf.String()
+ ctx.Data["FileContent"] = opts.Content
+
+ if _, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, opts); err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplCherryPick, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.editor.fail_to_apply_patch", err), tplPatchFile, &form)
+ return
+ }
+ }
+
+ if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName))
+ }
+}
diff --git a/routers/web/repo/code_frequency.go b/routers/web/repo/code_frequency.go
new file mode 100644
index 0000000..c76f492
--- /dev/null
+++ b/routers/web/repo/code_frequency.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplCodeFrequency base.TplName = "repo/activity"
+)
+
+// CodeFrequency renders the page to show repository code frequency
+func CodeFrequency(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.code_frequency")
+
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsCodeFrequency"] = true
+ ctx.PageData["repoLink"] = ctx.Repo.RepoLink
+
+ ctx.HTML(http.StatusOK, tplCodeFrequency)
+}
+
+// CodeFrequencyData returns JSON of code frequency data
+func CodeFrequencyData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("GetCodeFrequencyData", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
+ }
+}
diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go
new file mode 100644
index 0000000..0e5d1f0
--- /dev/null
+++ b/routers/web/repo/commit.go
@@ -0,0 +1,468 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "html/template"
+ "net/http"
+ "path"
+ "strings"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitgraph"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/gitdiff"
+ git_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplCommits base.TplName = "repo/commits"
+ tplGraph base.TplName = "repo/graph"
+ tplGraphDiv base.TplName = "repo/graph/div"
+ tplCommitPage base.TplName = "repo/commit_page"
+)
+
+// RefCommits renders the commits page
+func RefCommits(ctx *context.Context) {
+ switch {
+ case len(ctx.Repo.TreePath) == 0:
+ Commits(ctx)
+ case ctx.Repo.TreePath == "search":
+ SearchCommits(ctx)
+ default:
+ FileHistory(ctx)
+ }
+}
+
+// Commits renders a branch's commits
+func Commits(ctx *context.Context) {
+ ctx.Data["PageIsCommits"] = true
+ if ctx.Repo.Commit == nil {
+ ctx.NotFound("Commit not found", nil)
+ return
+ }
+ ctx.Data["PageIsViewCode"] = true
+
+ commitsCount, err := ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ pageSize := ctx.FormInt("limit")
+ if pageSize <= 0 {
+ pageSize = setting.Git.CommitsRangeSize
+ }
+
+ // Both `git log branchName` and `git log commitId` work.
+ commits, err := ctx.Repo.Commit.CommitsByRange(page, pageSize, "")
+ if err != nil {
+ ctx.ServerError("CommitsByRange", err)
+ return
+ }
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["CommitCount"] = commitsCount
+
+ pager := context.NewPagination(int(commitsCount), pageSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+// Graph renders the commit graph, showing commits from all branches.
+func Graph(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.commit_graph")
+ ctx.Data["PageIsCommits"] = true
+ ctx.Data["PageIsViewCode"] = true
+ mode := strings.ToLower(ctx.FormTrim("mode"))
+ if mode != "monochrome" {
+ mode = "color"
+ }
+ ctx.Data["Mode"] = mode
+ hidePRRefs := ctx.FormBool("hide-pr-refs")
+ ctx.Data["HidePRRefs"] = hidePRRefs
+ branches := ctx.FormStrings("branch")
+ realBranches := make([]string, len(branches))
+ copy(realBranches, branches)
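+ // Branch names that start with "--" are qualified with refs/heads/ so that
+ // they cannot be mistaken for command-line flags when handed to git.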
+ for i, branch := range realBranches {
+ if strings.HasPrefix(branch, "--") {
+ realBranches[i] = git.BranchPrefix + branch
+ }
+ }
+ ctx.Data["SelectedBranches"] = realBranches
+ files := ctx.FormStrings("file")
+
+ commitsCount, err := ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+
+ graphCommitsCount, err := ctx.Repo.GetCommitGraphsCount(ctx, hidePRRefs, realBranches, files)
+ if err != nil {
+ log.Warn("GetCommitGraphsCount failed for graph generation (exclude PRs: %t, branches: %s) in %-v. Will ignore branches and try again. Underlying error: %v", hidePRRefs, branches, ctx.Repo.Repository, err)
+ realBranches = []string{}
+ branches = []string{}
+ graphCommitsCount, err = ctx.Repo.GetCommitGraphsCount(ctx, hidePRRefs, realBranches, files)
+ if err != nil {
+ ctx.ServerError("GetCommitGraphsCount", err)
+ return
+ }
+ }
+
+ page := ctx.FormInt("page")
+
+ graph, err := gitgraph.GetCommitGraph(ctx.Repo.GitRepo, page, 0, hidePRRefs, realBranches, files)
+ if err != nil {
+ ctx.ServerError("GetCommitGraph", err)
+ return
+ }
+
+ if err := graph.LoadAndProcessCommits(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo); err != nil {
+ ctx.ServerError("LoadAndProcessCommits", err)
+ return
+ }
+
+ ctx.Data["Graph"] = graph
+
+ gitRefs, err := ctx.Repo.GitRepo.GetRefs()
+ if err != nil {
+ ctx.ServerError("GitRepo.GetRefs", err)
+ return
+ }
+
+ ctx.Data["AllRefs"] = gitRefs
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["CommitCount"] = commitsCount
+
+ paginator := context.NewPagination(int(graphCommitsCount), setting.UI.GraphMaxCommitNum, page, 5)
+ paginator.AddParam(ctx, "mode", "Mode")
+ paginator.AddParam(ctx, "hide-pr-refs", "HidePRRefs")
+ for _, branch := range branches {
+ paginator.AddParamString("branch", branch)
+ }
+ for _, file := range files {
+ paginator.AddParamString("file", file)
+ }
+ ctx.Data["Page"] = paginator
+ if ctx.FormBool("div-only") {
+ ctx.HTML(http.StatusOK, tplGraphDiv)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplGraph)
+}
+
+// SearchCommits renders commits filtered by keyword
+func SearchCommits(ctx *context.Context) {
+ ctx.Data["PageIsCommits"] = true
+ ctx.Data["PageIsViewCode"] = true
+
+ query := ctx.FormTrim("q")
+ if len(query) == 0 {
+ ctx.Redirect(ctx.Repo.RepoLink + "/commits/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ all := ctx.FormBool("all")
+ opts := git.NewSearchCommitsOptions(query, all)
+ commits, err := ctx.Repo.Commit.SearchCommits(opts)
+ if err != nil {
+ ctx.ServerError("SearchCommits", err)
+ return
+ }
+ ctx.Data["CommitCount"] = len(commits)
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Keyword"] = query
+ if all {
+ ctx.Data["All"] = true
+ }
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+// FileHistory shows a file's revision history
+func FileHistory(ctx *context.Context) {
+ fileName := ctx.Repo.TreePath
+ if len(fileName) == 0 {
+ Commits(ctx)
+ return
+ }
+
+ commitsCount, err := ctx.Repo.GitRepo.FileCommitsCount(ctx.Repo.RefName, fileName)
+ if err != nil {
+ ctx.ServerError("FileCommitsCount", err)
+ return
+ } else if commitsCount == 0 {
+ ctx.NotFound("FileCommitsCount", nil)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ commits, err := ctx.Repo.GitRepo.CommitsByFileAndRange(
+ git.CommitsByFileAndRangeOptions{
+ Revision: ctx.Repo.RefName,
+ File: fileName,
+ Page: page,
+ })
+ if err != nil {
+ ctx.ServerError("CommitsByFileAndRange", err)
+ return
+ }
+
+ if len(commits) == 0 {
+ ctx.NotFound("CommitsByFileAndRange", nil)
+ return
+ }
+
+ oldestCommit := commits[len(commits)-1]
+
+ renamedFiles, err := git.GetCommitFileRenames(ctx, ctx.Repo.GitRepo.Path, oldestCommit.ID.String())
+ if err != nil {
+ ctx.ServerError("GetCommitFileRenames", err)
+ return
+ }
+
+ for _, renames := range renamedFiles {
+ if renames[1] == fileName {
+ ctx.Data["OldFilename"] = renames[0]
+ ctx.Data["OldFilenameHistory"] = fmt.Sprintf("%s/commits/commit/%s/%s", ctx.Repo.RepoLink, oldestCommit.ID.String(), renames[0])
+ break
+ }
+ }
+
+ ctx.Data["Commits"] = processGitCommits(ctx, commits)
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["FileName"] = fileName
+ ctx.Data["CommitCount"] = commitsCount
+
+ pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplCommits)
+}
+
+func LoadBranchesAndTags(ctx *context.Context) {
+ response, err := git_service.LoadBranchesAndTags(ctx, ctx.Repo, ctx.Params("sha"))
+ if err == nil {
+ ctx.JSON(http.StatusOK, response)
+ return
+ }
+ ctx.NotFoundOrServerError(fmt.Sprintf("could not load branches and tags the commit %s belongs to", ctx.Params("sha")), git.IsErrNotExist, err)
+}
+
+// Diff shows the changes introduced by the given commit compared to its parent commit
+func Diff(ctx *context.Context) {
+ ctx.Data["PageIsDiff"] = true
+
+ userName := ctx.Repo.Owner.Name
+ repoName := ctx.Repo.Repository.Name
+ commitID := ctx.Params(":sha")
+ var (
+ gitRepo *git.Repository
+ err error
+ )
+
+ if ctx.Data["PageIsWiki"] != nil {
+ gitRepo, err = gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ return
+ }
+ defer gitRepo.Close()
+ } else {
+ gitRepo = ctx.Repo.GitRepo
+ }
+
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("Repo.GitRepo.GetCommit", err)
+ } else {
+ ctx.ServerError("Repo.GitRepo.GetCommit", err)
+ }
+ return
+ }
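+ // Normalize an abbreviated SHA to the full object ID for later lookups and links.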
+ if len(commitID) != commit.ID.Type().FullLength() {
+ commitID = commit.ID.String()
+ }
+
+ fileOnly := ctx.FormBool("file-only")
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
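+ // When the request targets one or two specific files, lift the diff size limits
+ // so those files are always rendered in full.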
+ if fileOnly && (len(files) == 2 || len(files) == 1) {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
+ AfterCommitID: commitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }, files...)
+ if err != nil {
+ ctx.NotFound("GetDiff", err)
+ return
+ }
+
+ parents := make([]string, commit.ParentCount())
+ for i := 0; i < commit.ParentCount(); i++ {
+ sha, err := commit.ParentID(i)
+ if err != nil {
+ ctx.NotFound("repo.Diff", err)
+ return
+ }
+ parents[i] = sha.String()
+ }
+
+ ctx.Data["CommitID"] = commitID
+ ctx.Data["AfterCommitID"] = commitID
+ ctx.Data["Username"] = userName
+ ctx.Data["Reponame"] = repoName
+
+ var parentCommit *git.Commit
+ if commit.ParentCount() > 0 {
+ parentCommit, err = gitRepo.GetCommit(parents[0])
+ if err != nil {
+ ctx.NotFound("GetParentCommit", err)
+ return
+ }
+ }
+ setCompareContext(ctx, parentCommit, commit, userName, repoName)
+ ctx.Data["Title"] = commit.Summary() + " · " + base.ShortSha(commitID)
+ ctx.Data["Commit"] = commit
+ ctx.Data["Diff"] = diff
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptionsAll)
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ }
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, statuses)
+ }
+
+ ctx.Data["CommitStatus"] = git_model.CalcCommitStatus(statuses)
+ ctx.Data["CommitStatuses"] = statuses
+
+ verification := asymkey_model.ParseCommitWithSignature(ctx, commit)
+ ctx.Data["Verification"] = verification
+ ctx.Data["Author"] = user_model.ValidateCommitWithEmail(ctx, commit)
+ ctx.Data["Parents"] = parents
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+ }, nil); err != nil {
+ ctx.ServerError("CalculateTrustStatus", err)
+ return
+ }
+
+ note := &git.Note{}
+ err = git.GetNote(ctx, ctx.Repo.GitRepo, commitID, note)
+ if err == nil {
+ ctx.Data["NoteCommit"] = note.Commit
+ ctx.Data["NoteAuthor"] = user_model.ValidateCommitWithEmail(ctx, note.Commit)
+ ctx.Data["NoteRendered"], err = markup.RenderCommitMessage(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: path.Join("commit", util.PathEscapeSegments(commitID)),
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{}))))
+ if err != nil {
+ ctx.ServerError("RenderCommitMessage", err)
+ return
+ }
+ }
+
+ ctx.HTML(http.StatusOK, tplCommitPage)
+}
+
+// RawDiff dumps the raw diff of the given commit in the repository to the response writer
+func RawDiff(ctx *context.Context) {
+ var gitRepo *git.Repository
+ if ctx.Data["PageIsWiki"] != nil {
+ wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return
+ }
+ defer wikiRepo.Close()
+ gitRepo = wikiRepo
+ } else {
+ gitRepo = ctx.Repo.GitRepo
+ if gitRepo == nil {
+ ctx.ServerError("GitRepo not open", fmt.Errorf("no open git repo for '%s'", ctx.Repo.Repository.FullName()))
+ return
+ }
+ }
+ if err := git.GetRawDiff(
+ gitRepo,
+ ctx.Params(":sha"),
+ git.RawDiffType(ctx.Params(":ext")),
+ ctx.Resp,
+ ); err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetRawDiff",
+ errors.New("commit "+ctx.Params(":sha")+" does not exist."))
+ return
+ }
+ ctx.ServerError("GetRawDiff", err)
+ return
+ }
+}
+
+func processGitCommits(ctx *context.Context, gitCommits []*git.Commit) []*git_model.SignCommitWithStatuses {
+ commits := git_model.ConvertFromGitCommit(ctx, gitCommits, ctx.Repo.Repository)
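+ // Users who cannot read Actions should not see commit status target URLs that
+ // point at Actions runs.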
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ for _, commit := range commits {
+ if commit.Status == nil {
+ continue
+ }
+ commit.Status.HideActionsURL(ctx)
+ git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
+ }
+ }
+ return commits
+}
diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go
new file mode 100644
index 0000000..38d6004
--- /dev/null
+++ b/routers/web/repo/compare.go
@@ -0,0 +1,972 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bufio"
+ gocontext "context"
+ "encoding/csv"
+ "errors"
+ "fmt"
+ "html"
+ "io"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ csv_module "code.gitea.io/gitea/modules/csv"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/gitdiff"
+)
+
+const (
+ tplCompare base.TplName = "repo/diff/compare"
+ tplBlobExcerpt base.TplName = "repo/diff/blob_excerpt"
+ tplDiffBox base.TplName = "repo/diff/box"
+)
+
+// setCompareContext sets context data.
+func setCompareContext(ctx *context.Context, before, head *git.Commit, headOwner, headName string) {
+ ctx.Data["BeforeCommit"] = before
+ ctx.Data["HeadCommit"] = head
+
+ ctx.Data["GetBlobByPathForCommit"] = func(commit *git.Commit, path string) *git.Blob {
+ if commit == nil {
+ return nil
+ }
+
+ blob, err := commit.GetBlobByPath(path)
+ if err != nil {
+ return nil
+ }
+ return blob
+ }
+
+ ctx.Data["GetSniffedTypeForBlob"] = func(blob *git.Blob) typesniffer.SniffedType {
+ st := typesniffer.SniffedType{}
+
+ if blob == nil {
+ return st
+ }
+
+ st, err := blob.GuessContentType()
+ if err != nil {
+ log.Error("GuessContentType failed: %v", err)
+ return st
+ }
+ return st
+ }
+
+ setPathsCompareContext(ctx, before, head, headOwner, headName)
+ setImageCompareContext(ctx)
+ setCsvCompareContext(ctx)
+}
+
+// SourceCommitURL creates a relative URL for a commit in the given repository
+func SourceCommitURL(owner, name string, commit *git.Commit) string {
+ return setting.AppSubURL + "/" + url.PathEscape(owner) + "/" + url.PathEscape(name) + "/src/commit/" + url.PathEscape(commit.ID.String())
+}
+
+// RawCommitURL creates a relative URL for the raw commit in the given repository
+func RawCommitURL(owner, name string, commit *git.Commit) string {
+ return setting.AppSubURL + "/" + url.PathEscape(owner) + "/" + url.PathEscape(name) + "/raw/commit/" + url.PathEscape(commit.ID.String())
+}
+
+// setPathsCompareContext sets context data for source and raw paths
+func setPathsCompareContext(ctx *context.Context, base, head *git.Commit, headOwner, headName string) {
+ ctx.Data["SourcePath"] = SourceCommitURL(headOwner, headName, head)
+ ctx.Data["RawPath"] = RawCommitURL(headOwner, headName, head)
+ if base != nil {
+ ctx.Data["BeforeSourcePath"] = SourceCommitURL(headOwner, headName, base)
+ ctx.Data["BeforeRawPath"] = RawCommitURL(headOwner, headName, base)
+ }
+}
+
+// setImageCompareContext sets context data that is required by image compare template
+func setImageCompareContext(ctx *context.Context) {
+ ctx.Data["IsSniffedTypeAnImage"] = func(st typesniffer.SniffedType) bool {
+ return st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage())
+ }
+}
+
+// setCsvCompareContext sets context data that is required by the CSV compare template
+func setCsvCompareContext(ctx *context.Context) {
+ ctx.Data["IsCsvFile"] = func(diffFile *gitdiff.DiffFile) bool {
+ extension := strings.ToLower(filepath.Ext(diffFile.Name))
+ return extension == ".csv" || extension == ".tsv"
+ }
+
+ type CsvDiffResult struct {
+ Sections []*gitdiff.TableDiffSection
+ Error string
+ }
+
+ ctx.Data["CreateCsvDiff"] = func(diffFile *gitdiff.DiffFile, baseBlob, headBlob *git.Blob) CsvDiffResult {
+ if diffFile == nil {
+ return CsvDiffResult{nil, ""}
+ }
+
+ errTooLarge := errors.New(ctx.Locale.TrString("repo.error.csv.too_large"))
+
+ csvReaderFromCommit := func(ctx *markup.RenderContext, blob *git.Blob) (*csv.Reader, io.Closer, error) {
+ if blob == nil {
+ // It's ok for blob to be nil (file added or deleted)
+ return nil, nil, nil
+ }
+
+ if setting.UI.CSV.MaxFileSize != 0 && setting.UI.CSV.MaxFileSize < blob.Size() {
+ return nil, nil, errTooLarge
+ }
+
+ reader, err := blob.DataAsync()
+ if err != nil {
+ return nil, nil, err
+ }
+
+ csvReader, err := csv_module.CreateReaderAndDetermineDelimiter(ctx, charset.ToUTF8WithFallbackReader(reader, charset.ConvertOpts{}))
+ return csvReader, reader, err
+ }
+
+ baseReader, baseBlobCloser, err := csvReaderFromCommit(&markup.RenderContext{Ctx: ctx, RelativePath: diffFile.OldName}, baseBlob)
+ if baseBlobCloser != nil {
+ defer baseBlobCloser.Close()
+ }
+ if err != nil {
+ if err == errTooLarge {
+ return CsvDiffResult{nil, err.Error()}
+ }
+ log.Error("error whilst creating csv.Reader from file %s in base commit %s in %s: %v", diffFile.Name, baseBlob.ID.String(), ctx.Repo.Repository.Name, err)
+ return CsvDiffResult{nil, "unable to load file"}
+ }
+
+ headReader, headBlobCloser, err := csvReaderFromCommit(&markup.RenderContext{Ctx: ctx, RelativePath: diffFile.Name}, headBlob)
+ if headBlobCloser != nil {
+ defer headBlobCloser.Close()
+ }
+ if err != nil {
+ if err == errTooLarge {
+ return CsvDiffResult{nil, err.Error()}
+ }
+ log.Error("error whilst creating csv.Reader from file %s in head commit %s in %s: %v", diffFile.Name, headBlob.ID.String(), ctx.Repo.Repository.Name, err)
+ return CsvDiffResult{nil, "unable to load file"}
+ }
+
+ sections, err := gitdiff.CreateCsvDiff(diffFile, baseReader, headReader)
+ if err != nil {
+ errMessage, err := csv_module.FormatError(err, ctx.Locale)
+ if err != nil {
+ log.Error("CreateCsvDiff FormatError failed: %v", err)
+ return CsvDiffResult{nil, "unknown csv diff error"}
+ }
+ return CsvDiffResult{nil, errMessage}
+ }
+ return CsvDiffResult{sections, ""}
+ }
+}
+
+// ParseCompareInfo parses the compare information between two commits, preparing the references to be compared
+func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
+ baseRepo := ctx.Repo.Repository
+ ci := &common.CompareInfo{}
+
+ fileOnly := ctx.FormBool("file-only")
+
+ // Get compared branches information
+ // A full compare url is of the form:
+ //
+ // 1. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headBranch}
+ // 2. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}:{:headBranch}
+ // 3. /{:baseOwner}/{:baseRepoName}/compare/{:baseBranch}...{:headOwner}/{:headRepoName}:{:headBranch}
+ // 4. /{:baseOwner}/{:baseRepoName}/compare/{:headBranch}
+ // 5. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}:{:headBranch}
+ // 6. /{:baseOwner}/{:baseRepoName}/compare/{:headOwner}/{:headRepoName}:{:headBranch}
+ //
+ // Here we obtain the infoPath "{:baseBranch}...[{:headOwner}/{:headRepoName}:]{:headBranch}" as ctx.Params("*")
+ // with the :baseRepo in ctx.Repo.
+ //
+ // Note: Generally :headRepoName is not provided here - we are only passed :headOwner.
+ //
+ // How do we determine the :headRepo?
+ //
+ // 1. If :headOwner is not set then the :headRepo = :baseRepo
+ // 2. If :headOwner is set - then look for the fork of :baseRepo owned by :headOwner
+ // 3. But... :baseRepo could be a fork of :headOwner's repo - so check that
+	// 4. Now, :baseRepo and :headRepo could be forks of the same repo - so check that
+ //
+ // format: <base branch>...[<head repo>:]<head branch>
+ // base<-head: master...head:feature
+ // same repo: master...feature
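+	//
+	// For example, the infoPath "master...other:feature" resolves to the base
+	// branch "master", the head owner "other" and the head branch "feature",
+	// while "v1.0..v1.1" enables a direct comparison between the two references.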
+
+ var (
+ isSameRepo bool
+ infoPath string
+ err error
+ )
+
+ infoPath = ctx.Params("*")
+ var infos []string
+ if infoPath == "" {
+ infos = []string{baseRepo.DefaultBranch, baseRepo.DefaultBranch}
+ } else {
+ infos = strings.SplitN(infoPath, "...", 2)
+ if len(infos) != 2 {
+ if infos = strings.SplitN(infoPath, "..", 2); len(infos) == 2 {
+ ci.DirectComparison = true
+ ctx.Data["PageIsComparePull"] = false
+ } else {
+ infos = []string{baseRepo.DefaultBranch, infoPath}
+ }
+ }
+ }
+
+ ctx.Data["BaseName"] = baseRepo.OwnerName
+ ci.BaseBranch = infos[0]
+ ctx.Data["BaseBranch"] = ci.BaseBranch
+
+	// If there is no head repository, the comparison is within the same repository.
+ headInfos := strings.Split(infos[1], ":")
+ if len(headInfos) == 1 {
+ isSameRepo = true
+ ci.HeadUser = ctx.Repo.Owner
+ ci.HeadBranch = headInfos[0]
+ } else if len(headInfos) == 2 {
+ headInfosSplit := strings.Split(headInfos[0], "/")
+ if len(headInfosSplit) == 1 {
+ ci.HeadUser, err = user_model.GetUserByName(ctx, headInfos[0])
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("GetUserByName", nil)
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return nil
+ }
+ ci.HeadBranch = headInfos[1]
+ isSameRepo = ci.HeadUser.ID == ctx.Repo.Owner.ID
+ if isSameRepo {
+ ci.HeadRepo = baseRepo
+ }
+ } else {
+ ci.HeadRepo, err = repo_model.GetRepositoryByOwnerAndName(ctx, headInfosSplit[0], headInfosSplit[1])
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.NotFound("GetRepositoryByOwnerAndName", nil)
+ } else {
+ ctx.ServerError("GetRepositoryByOwnerAndName", err)
+ }
+ return nil
+ }
+ if err := ci.HeadRepo.LoadOwner(ctx); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("GetUserByName", nil)
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return nil
+ }
+ ci.HeadBranch = headInfos[1]
+ ci.HeadUser = ci.HeadRepo.Owner
+ isSameRepo = ci.HeadRepo.ID == ctx.Repo.Repository.ID
+ }
+ } else {
+ ctx.NotFound("CompareAndPullRequest", nil)
+ return nil
+ }
+ ctx.Data["HeadUser"] = ci.HeadUser
+ ctx.Data["HeadBranch"] = ci.HeadBranch
+ ctx.Repo.PullRequest.SameRepo = isSameRepo
+
+ // Check if base branch is valid.
+ baseIsCommit := ctx.Repo.GitRepo.IsCommitExist(ci.BaseBranch)
+ baseIsBranch := ctx.Repo.GitRepo.IsBranchExist(ci.BaseBranch)
+ baseIsTag := ctx.Repo.GitRepo.IsTagExist(ci.BaseBranch)
+
+ if !baseIsCommit && !baseIsBranch && !baseIsTag {
+ // Check if baseBranch is short sha commit hash
+ if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(ci.BaseBranch); baseCommit != nil {
+ ci.BaseBranch = baseCommit.ID.String()
+ ctx.Data["BaseBranch"] = ci.BaseBranch
+ baseIsCommit = true
+ } else if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
+ if isSameRepo {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch))
+ }
+ return nil
+ } else {
+ ctx.NotFound("IsRefExist", nil)
+ return nil
+ }
+ }
+ ctx.Data["BaseIsCommit"] = baseIsCommit
+ ctx.Data["BaseIsBranch"] = baseIsBranch
+ ctx.Data["BaseIsTag"] = baseIsTag
+ ctx.Data["IsPull"] = true
+
+ // Now we have the repository that represents the base
+
+ // The current base and head repositories and branches may not
+ // actually be the intended branches that the user wants to
+ // create a pull-request from - but also determining the head
+ // repo is difficult.
+
+ // We will want therefore to offer a few repositories to set as
+ // our base and head
+
+ // 1. First if the baseRepo is a fork get the "RootRepo" it was
+ // forked from
+ var rootRepo *repo_model.Repository
+ if baseRepo.IsFork {
+ err = baseRepo.GetBaseRepo(ctx)
+ if err != nil {
+ if !repo_model.IsErrRepoNotExist(err) {
+ ctx.ServerError("Unable to find root repo", err)
+ return nil
+ }
+ } else {
+ rootRepo = baseRepo.BaseRepo
+ }
+ }
+
+ // 2. Now if the current user is not the owner of the baseRepo,
+ // check if they have a fork of the base repo and offer that as
+ // "OwnForkRepo"
+ var ownForkRepo *repo_model.Repository
+ if ctx.Doer != nil && baseRepo.OwnerID != ctx.Doer.ID {
+ repo := repo_model.GetForkedRepo(ctx, ctx.Doer.ID, baseRepo.ID)
+ if repo != nil {
+ ownForkRepo = repo
+ ctx.Data["OwnForkRepo"] = ownForkRepo
+ }
+ }
+
+ has := ci.HeadRepo != nil
+ // 3. If the base is a forked from "RootRepo" and the owner of
+ // the "RootRepo" is the :headUser - set headRepo to that
+ if !has && rootRepo != nil && rootRepo.OwnerID == ci.HeadUser.ID {
+ ci.HeadRepo = rootRepo
+ has = true
+ }
+
+ // 4. If the ctx.Doer has their own fork of the baseRepo and the headUser is the ctx.Doer
+ // set the headRepo to the ownFork
+ if !has && ownForkRepo != nil && ownForkRepo.OwnerID == ci.HeadUser.ID {
+ ci.HeadRepo = ownForkRepo
+ has = true
+ }
+
+ // 5. If the headOwner has a fork of the baseRepo - use that
+ if !has {
+ ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ID)
+ has = ci.HeadRepo != nil
+ }
+
+ // 6. If the baseRepo is a fork and the headUser has a fork of that use that
+ if !has && baseRepo.IsFork {
+ ci.HeadRepo = repo_model.GetForkedRepo(ctx, ci.HeadUser.ID, baseRepo.ForkID)
+ has = ci.HeadRepo != nil
+ }
+
+ // 7. Otherwise if we're not the same repo and haven't found a repo give up
+ if !isSameRepo && !has {
+ ctx.Data["PageIsComparePull"] = false
+ }
+
+ // 8. Finally open the git repo
+ if isSameRepo {
+ ci.HeadRepo = ctx.Repo.Repository
+ ci.HeadGitRepo = ctx.Repo.GitRepo
+ } else if has {
+ ci.HeadGitRepo, err = gitrepo.OpenRepository(ctx, ci.HeadRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer ci.HeadGitRepo.Close()
+ } else {
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ ctx.Data["HeadRepo"] = ci.HeadRepo
+ ctx.Data["BaseCompareRepo"] = ctx.Repo.Repository
+
+ // Now we need to assert that the ctx.Doer has permission to read
+ // the baseRepo's code and pulls
+ // (NOT headRepo's)
+ permBase, err := access_model.GetUserRepoPermission(ctx, baseRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+ if !permBase.CanRead(unit.TypeCode) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in baseRepo has Permissions: %-+v",
+ ctx.Doer,
+ baseRepo,
+ permBase)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ // If we're not merging from the same repo:
+ if !isSameRepo {
+ // Assert ctx.Doer has permission to read headRepo's codes
+ permHead, err := access_model.GetUserRepoPermission(ctx, ci.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+ if !permHead.CanRead(unit.TypeCode) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot read code in Repo: %-v\nUser in headRepo has Permissions: %-+v",
+ ctx.Doer,
+ ci.HeadRepo,
+ permHead)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+ ctx.Data["CanWriteToHeadRepo"] = permHead.CanWrite(unit.TypeCode)
+ }
+
+ // If we have a rootRepo and it's different from:
+ // 1. the computed base
+ // 2. the computed head
+ // then get the branches of it
+ if rootRepo != nil &&
+ rootRepo.ID != ci.HeadRepo.ID &&
+ rootRepo.ID != baseRepo.ID {
+ canRead := access_model.CheckRepoUnitUser(ctx, rootRepo, ctx.Doer, unit.TypeCode)
+ if canRead {
+ ctx.Data["RootRepo"] = rootRepo
+ if !fileOnly {
+ branches, tags, err := getBranchesAndTagsForRepo(ctx, rootRepo)
+ if err != nil {
+ ctx.ServerError("GetBranchesForRepo", err)
+ return nil
+ }
+
+ ctx.Data["RootRepoBranches"] = branches
+ ctx.Data["RootRepoTags"] = tags
+ }
+ }
+ }
+
+ // If we have a ownForkRepo and it's different from:
+ // 1. The computed base
+ // 2. The computed head
+ // 3. The rootRepo (if we have one)
+ // then get the branches from it.
+ if ownForkRepo != nil &&
+ ownForkRepo.ID != ci.HeadRepo.ID &&
+ ownForkRepo.ID != baseRepo.ID &&
+ (rootRepo == nil || ownForkRepo.ID != rootRepo.ID) {
+ canRead := access_model.CheckRepoUnitUser(ctx, ownForkRepo, ctx.Doer, unit.TypeCode)
+ if canRead {
+ ctx.Data["OwnForkRepo"] = ownForkRepo
+ if !fileOnly {
+ branches, tags, err := getBranchesAndTagsForRepo(ctx, ownForkRepo)
+ if err != nil {
+ ctx.ServerError("GetBranchesForRepo", err)
+ return nil
+ }
+ ctx.Data["OwnForkRepoBranches"] = branches
+ ctx.Data["OwnForkRepoTags"] = tags
+ }
+ }
+ }
+
+ // Check if head branch is valid.
+ headIsCommit := ci.HeadGitRepo.IsCommitExist(ci.HeadBranch)
+ headIsBranch := ci.HeadGitRepo.IsBranchExist(ci.HeadBranch)
+ headIsTag := ci.HeadGitRepo.IsTagExist(ci.HeadBranch)
+ if !headIsCommit && !headIsBranch && !headIsTag {
+ // Check if headBranch is short sha commit hash
+ if headCommit, _ := ci.HeadGitRepo.GetCommit(ci.HeadBranch); headCommit != nil {
+ ci.HeadBranch = headCommit.ID.String()
+ ctx.Data["HeadBranch"] = ci.HeadBranch
+ headIsCommit = true
+ } else {
+ ctx.NotFound("IsRefExist", nil)
+ return nil
+ }
+ }
+ ctx.Data["HeadIsCommit"] = headIsCommit
+ ctx.Data["HeadIsBranch"] = headIsBranch
+ ctx.Data["HeadIsTag"] = headIsTag
+
+ // Treat as pull request if both references are branches
+ if ctx.Data["PageIsComparePull"] == nil {
+ ctx.Data["PageIsComparePull"] = headIsBranch && baseIsBranch
+ }
+
+ if ctx.Data["PageIsComparePull"] == true && !permBase.CanReadIssuesOrPulls(true) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User: %-v cannot create/read pull requests in Repo: %-v\nUser in baseRepo has Permissions: %-+v",
+ ctx.Doer,
+ baseRepo,
+ permBase)
+ }
+ ctx.NotFound("ParseCompareInfo", nil)
+ return nil
+ }
+
+ baseBranchRef := ci.BaseBranch
+ if baseIsBranch {
+ baseBranchRef = git.BranchPrefix + ci.BaseBranch
+ } else if baseIsTag {
+ baseBranchRef = git.TagPrefix + ci.BaseBranch
+ }
+ headBranchRef := ci.HeadBranch
+ if headIsBranch {
+ headBranchRef = git.BranchPrefix + ci.HeadBranch
+ } else if headIsTag {
+ headBranchRef = git.TagPrefix + ci.HeadBranch
+ }
+
+ ci.CompareInfo, err = ci.HeadGitRepo.GetCompareInfo(baseRepo.RepoPath(), baseBranchRef, headBranchRef, ci.DirectComparison, fileOnly)
+ if err != nil {
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+ if ci.DirectComparison {
+ ctx.Data["BeforeCommitID"] = ci.CompareInfo.BaseCommitID
+ } else {
+ ctx.Data["BeforeCommitID"] = ci.CompareInfo.MergeBase
+ }
+
+ return ci
+}
+
+// PrepareCompareDiff renders the compare diff page
+func PrepareCompareDiff(
+ ctx *context.Context,
+ ci *common.CompareInfo,
+ whitespaceBehavior git.TrustedCmdArgs,
+) bool {
+ var (
+ repo = ctx.Repo.Repository
+ err error
+ title string
+ )
+
+ // Get diff information.
+ ctx.Data["CommitRepoLink"] = ci.HeadRepo.Link()
+
+ headCommitID := ci.CompareInfo.HeadCommitID
+
+ ctx.Data["AfterCommitID"] = headCommitID
+
+ if (headCommitID == ci.CompareInfo.MergeBase && !ci.DirectComparison) ||
+ headCommitID == ci.CompareInfo.BaseCommitID {
+ ctx.Data["IsNothingToCompare"] = true
+ if unit, err := repo.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+
+ if !config.AutodetectManualMerge {
+ allowEmptyPr := !(ci.BaseBranch == ci.HeadBranch && ctx.Repo.Repository.Name == ci.HeadRepo.Name)
+ ctx.Data["AllowEmptyPr"] = allowEmptyPr
+
+ return !allowEmptyPr
+ }
+
+ ctx.Data["AllowEmptyPr"] = false
+ }
+ return true
+ }
+
+ beforeCommitID := ci.CompareInfo.MergeBase
+ if ci.DirectComparison {
+ beforeCommitID = ci.CompareInfo.BaseCommitID
+ }
+
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
+ if len(files) == 2 || len(files) == 1 {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diff, err := gitdiff.GetDiff(ctx, ci.HeadGitRepo,
+ &gitdiff.DiffOptions{
+ BeforeCommitID: beforeCommitID,
+ AfterCommitID: headCommitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: whitespaceBehavior,
+ DirectComparison: ci.DirectComparison,
+ }, ctx.FormStrings("files")...)
+ if err != nil {
+ ctx.ServerError("GetDiffRangeWithWhitespaceBehavior", err)
+ return false
+ }
+ ctx.Data["Diff"] = diff
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ headCommit, err := ci.HeadGitRepo.GetCommit(headCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return false
+ }
+
+ baseGitRepo := ctx.Repo.GitRepo
+
+ beforeCommit, err := baseGitRepo.GetCommit(beforeCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return false
+ }
+
+ commits := processGitCommits(ctx, ci.CompareInfo.Commits)
+ ctx.Data["Commits"] = commits
+ ctx.Data["CommitCount"] = len(commits)
+
+ if len(commits) == 1 {
+ c := commits[0]
+ title = strings.TrimSpace(c.UserCommit.Summary())
+
+ body := strings.Split(strings.TrimSpace(c.UserCommit.Message()), "\n")
+ if len(body) > 1 {
+ ctx.Data["content"] = strings.Join(body[1:], "\n")
+ }
+ } else {
+ title = ci.HeadBranch
+ }
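+	// Keep the suggested title within 255 bytes; any overflow is moved into the
+	// body content so that it is not lost.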
+ if len(title) > 255 {
+ var trailer string
+ title, trailer = util.SplitStringAtByteN(title, 255)
+ if len(trailer) > 0 {
+ if ctx.Data["content"] != nil {
+ ctx.Data["content"] = fmt.Sprintf("%s\n\n%s", trailer, ctx.Data["content"])
+ } else {
+ ctx.Data["content"] = trailer + "\n"
+ }
+ }
+ }
+
+ ctx.Data["title"] = title
+ ctx.Data["Username"] = ci.HeadUser.Name
+ ctx.Data["Reponame"] = ci.HeadRepo.Name
+
+ setCompareContext(ctx, beforeCommit, headCommit, ci.HeadUser.Name, ci.HeadRepo.Name)
+
+ return false
+}
+
+func getBranchesAndTagsForRepo(ctx gocontext.Context, repo *repo_model.Repository) (branches, tags []string, err error) {
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ return nil, nil, err
+ }
+ defer gitRepo.Close()
+
+ branches, err = git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: repo.ID,
+ ListOptions: db.ListOptionsAll,
+ IsDeletedBranch: optional.Some(false),
+ })
+ if err != nil {
+ return nil, nil, err
+ }
+ tags, err = gitRepo.GetTags(0, 0)
+ if err != nil {
+ return nil, nil, err
+ }
+ return branches, tags, nil
+}
+
+// CompareDiff shows the differences from one commit to another
+func CompareDiff(ctx *context.Context) {
+ ci := ParseCompareInfo(ctx)
+ defer func() {
+ if ci != nil && ci.HeadGitRepo != nil {
+ ci.HeadGitRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["DirectComparison"] = ci.DirectComparison
+ ctx.Data["OtherCompareSeparator"] = ".."
+ ctx.Data["CompareSeparator"] = "..."
+ if ci.DirectComparison {
+ ctx.Data["CompareSeparator"] = ".."
+ ctx.Data["OtherCompareSeparator"] = "..."
+ }
+
+ nothingToCompare := PrepareCompareDiff(ctx, ci,
+ gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)))
+ if ctx.Written() {
+ return
+ }
+
+ baseTags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = baseTags
+
+ fileOnly := ctx.FormBool("file-only")
+ if fileOnly {
+ ctx.HTML(http.StatusOK, tplDiffBox)
+ return
+ }
+
+ headBranches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: ci.HeadRepo.ID,
+ ListOptions: db.ListOptionsAll,
+ IsDeletedBranch: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("GetBranches", err)
+ return
+ }
+ ctx.Data["HeadBranches"] = headBranches
+
+ // For compare repo branches
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ headTags, err := repo_model.GetTagNamesByRepoID(ctx, ci.HeadRepo.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["HeadTags"] = headTags
+
+ if ctx.Data["PageIsComparePull"] == true {
+ pr, err := issues_model.GetUnmergedPullRequest(ctx, ci.HeadRepo.ID, ctx.Repo.Repository.ID, ci.HeadBranch, ci.BaseBranch, issues_model.PullRequestFlowGithub)
+ if err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ ctx.ServerError("GetUnmergedPullRequest", err)
+ return
+ }
+ } else {
+ ctx.Data["HasPullRequest"] = true
+ if err := pr.LoadIssue(ctx); err != nil {
+ ctx.ServerError("LoadIssue", err)
+ return
+ }
+ ctx.Data["PullRequest"] = pr
+ ctx.HTML(http.StatusOK, tplCompareDiff)
+ return
+ }
+
+ if !nothingToCompare {
+ // Setup information for new form.
+ RetrieveRepoMetas(ctx, ctx.Repo.Repository, true)
+ if ctx.Written() {
+ return
+ }
+ }
+ }
+ beforeCommitID := ctx.Data["BeforeCommitID"].(string)
+ afterCommitID := ctx.Data["AfterCommitID"].(string)
+
+ separator := "..."
+ if ci.DirectComparison {
+ separator = ".."
+ }
+ ctx.Data["Title"] = "Comparing " + base.ShortSha(beforeCommitID) + separator + base.ShortSha(afterCommitID)
+
+ ctx.Data["IsDiffCompare"] = true
+ _, templateErrs := setTemplateIfExists(ctx, pullRequestTemplateKey, pullRequestTemplateCandidates)
+
+ if len(templateErrs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, templateErrs), true)
+ }
+
+ if content, ok := ctx.Data["content"].(string); ok && content != "" {
+		// If template content is set, prepend the "content". This only applies
+		// when there is exactly one commit to compare and that commit has a
+		// message; in that case the commit message is prepended to the template body.
+ if templateContent, ok := ctx.Data[pullRequestTemplateKey].(string); ok && templateContent != "" {
+ // Reuse the same key as that's prioritized over the "content" key.
+ // Add two new lines between the content to ensure there's always at least
+ // one empty line between them.
+ ctx.Data[pullRequestTemplateKey] = content + "\n\n" + templateContent
+ }
+
+ // When using form fields, also add content to field with id "body".
+ if fields, ok := ctx.Data["Fields"].([]*api.IssueFormField); ok {
+ for _, field := range fields {
+ if field.ID == "body" {
+ if fieldValue, ok := field.Attributes["value"].(string); ok && fieldValue != "" {
+ field.Attributes["value"] = content + "\n\n" + fieldValue
+ } else {
+ field.Attributes["value"] = content
+ }
+ }
+ }
+ }
+ }
+
+ ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanWrite(unit.TypeProjects)
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypePullRequests)
+
+ if unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypePullRequests); err == nil {
+ config := unit.PullRequestsConfig()
+ ctx.Data["AllowMaintainerEdit"] = config.DefaultAllowMaintainerEdit
+ } else {
+ ctx.Data["AllowMaintainerEdit"] = false
+ }
+
+ ctx.HTML(http.StatusOK, tplCompare)
+}
+
+// ExcerptBlob renders the contents of a blob excerpt
+func ExcerptBlob(ctx *context.Context) {
+ commitID := ctx.Params("sha")
+ lastLeft := ctx.FormInt("last_left")
+ lastRight := ctx.FormInt("last_right")
+ idxLeft := ctx.FormInt("left")
+ idxRight := ctx.FormInt("right")
+ leftHunkSize := ctx.FormInt("left_hunk_size")
+ rightHunkSize := ctx.FormInt("right_hunk_size")
+ anchor := ctx.FormString("anchor")
+ direction := ctx.FormString("direction")
+ filePath := ctx.FormString("path")
+ gitRepo := ctx.Repo.GitRepo
+ if ctx.FormBool("wiki") {
+ var err error
+ gitRepo, err = gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return
+ }
+ defer gitRepo.Close()
+ }
+ chunkSize := gitdiff.BlobExcerptChunkSize
+ commit, err := gitRepo.GetCommit(commitID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetCommit")
+ return
+ }
+ section := &gitdiff.DiffSection{
+ FileName: filePath,
+ Name: filePath,
+ }
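+	// Expand the excerpt: when expanding "up" or "down" and the remaining gap is
+	// larger than one chunk, reveal a single chunk on that side; otherwise the
+	// whole remaining gap fits in one chunk, so reveal it all and drop the hunk
+	// header by zeroing the hunk sizes.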
+ if direction == "up" && (idxLeft-lastLeft) > chunkSize {
+ idxLeft -= chunkSize
+ idxRight -= chunkSize
+ leftHunkSize += chunkSize
+ rightHunkSize += chunkSize
+ section.Lines, err = getExcerptLines(commit, filePath, idxLeft-1, idxRight-1, chunkSize)
+ } else if direction == "down" && (idxLeft-lastLeft) > chunkSize {
+ section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, chunkSize)
+ lastLeft += chunkSize
+ lastRight += chunkSize
+ } else {
+ offset := -1
+ if direction == "down" {
+ offset = 0
+ }
+ section.Lines, err = getExcerptLines(commit, filePath, lastLeft, lastRight, idxRight-lastRight+offset)
+ leftHunkSize = 0
+ rightHunkSize = 0
+ idxLeft = lastLeft
+ idxRight = lastRight
+ }
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "getExcerptLines")
+ return
+ }
+ if idxRight > lastRight {
+ lineText := " "
+ if rightHunkSize > 0 || leftHunkSize > 0 {
+ lineText = fmt.Sprintf("@@ -%d,%d +%d,%d @@\n", idxLeft, leftHunkSize, idxRight, rightHunkSize)
+ }
+ lineText = html.EscapeString(lineText)
+ lineSection := &gitdiff.DiffLine{
+ Type: gitdiff.DiffLineSection,
+ Content: lineText,
+ SectionInfo: &gitdiff.DiffLineSectionInfo{
+ Path: filePath,
+ LastLeftIdx: lastLeft,
+ LastRightIdx: lastRight,
+ LeftIdx: idxLeft,
+ RightIdx: idxRight,
+ LeftHunkSize: leftHunkSize,
+ RightHunkSize: rightHunkSize,
+ },
+ }
+ if direction == "up" {
+ section.Lines = append([]*gitdiff.DiffLine{lineSection}, section.Lines...)
+ } else if direction == "down" {
+ section.Lines = append(section.Lines, lineSection)
+ }
+ }
+ ctx.Data["section"] = section
+ ctx.Data["FileNameHash"] = git.HashFilePathForWebUI(filePath)
+ ctx.Data["AfterCommitID"] = commitID
+ ctx.Data["Anchor"] = anchor
+ ctx.HTML(http.StatusOK, tplBlobExcerpt)
+}
+
+func getExcerptLines(commit *git.Commit, filePath string, idxLeft, idxRight, chunkSize int) ([]*gitdiff.DiffLine, error) {
+ blob, err := commit.Tree.GetBlobByPath(filePath)
+ if err != nil {
+ return nil, err
+ }
+ reader, err := blob.DataAsync()
+ if err != nil {
+ return nil, err
+ }
+ defer reader.Close()
+ scanner := bufio.NewScanner(reader)
+ var diffLines []*gitdiff.DiffLine
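+	// Skip everything before idxRight (0-based), then collect up to chunkSize
+	// lines as plain diff lines with their 1-based left/right line numbers.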
+ for line := 0; line < idxRight+chunkSize; line++ {
+ if ok := scanner.Scan(); !ok {
+ break
+ }
+ if line < idxRight {
+ continue
+ }
+ lineText := scanner.Text()
+ diffLine := &gitdiff.DiffLine{
+ LeftIdx: idxLeft + (line - idxRight) + 1,
+ RightIdx: line + 1,
+ Type: gitdiff.DiffLinePlain,
+ Content: " " + lineText,
+ }
+ diffLines = append(diffLines, diffLine)
+ }
+ if err = scanner.Err(); err != nil {
+ return nil, fmt.Errorf("getExcerptLines scan: %w", err)
+ }
+ return diffLines, nil
+}
diff --git a/routers/web/repo/contributors.go b/routers/web/repo/contributors.go
new file mode 100644
index 0000000..762fbf9
--- /dev/null
+++ b/routers/web/repo/contributors.go
@@ -0,0 +1,38 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplContributors base.TplName = "repo/activity"
+)
+
+// Contributors renders the page that shows the repository contributors graph
+func Contributors(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.contributors")
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsContributors"] = true
+ ctx.HTML(http.StatusOK, tplContributors)
+}
+
+// ContributorsData renders JSON of contributors along with their weekly commit statistics
+func ContributorsData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
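+			// The statistics are still being generated; 202 lets the client retry later.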
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("GetContributorStats", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats)
+ }
+}
diff --git a/routers/web/repo/download.go b/routers/web/repo/download.go
new file mode 100644
index 0000000..c4a8bae
--- /dev/null
+++ b/routers/web/repo/download.go
@@ -0,0 +1,170 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "path"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+)
+
+// ServeBlobOrLFS downloads a git.Blob, redirecting to LFS if necessary
+func ServeBlobOrLFS(ctx *context.Context, blob *git.Blob, lastModified *time.Time) error {
+ if httpcache.HandleGenericETagTimeCache(ctx.Req, ctx.Resp, `"`+blob.ID.String()+`"`, lastModified) {
+ return nil
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ return err
+ }
+ closed := false
+ defer func() {
+ if closed {
+ return
+ }
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ }()
+
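+	// If the blob is a valid LFS pointer with a matching meta object in this
+	// repository, serve the LFS object (directly, or via a signed storage URL
+	// when direct serving is enabled); otherwise fall back to the raw blob.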
+ pointer, _ := lfs.ReadPointer(dataRc)
+ if pointer.IsValid() {
+ meta, _ := git_model.GetLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, pointer.Oid)
+ if meta == nil {
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ closed = true
+ return common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified)
+ }
+ if httpcache.HandleGenericETagCache(ctx.Req, ctx.Resp, `"`+pointer.Oid+`"`) {
+ return nil
+ }
+
+ if setting.LFS.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.LFS.URL(pointer.RelativePath(), blob.Name())
+ if u != nil && err == nil {
+ ctx.Redirect(u.String())
+ return nil
+ }
+ }
+
+ lfsDataRc, err := lfs.ReadMetaObject(meta.Pointer)
+ if err != nil {
+ return err
+ }
+ defer func() {
+ if err = lfsDataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ }()
+ common.ServeContentByReadSeeker(ctx.Base, ctx.Repo.TreePath, lastModified, lfsDataRc)
+ return nil
+ }
+ if err = dataRc.Close(); err != nil {
+ log.Error("ServeBlobOrLFS: Close: %v", err)
+ }
+ closed = true
+
+ return common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified)
+}
+
+func getBlobForEntry(ctx *context.Context) (blob *git.Blob, lastModified *time.Time) {
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetTreeEntryByPath", err)
+ } else {
+ ctx.ServerError("GetTreeEntryByPath", err)
+ }
+ return nil, nil
+ }
+
+ if entry.IsDir() || entry.IsSubModule() {
+ ctx.NotFound("getBlobForEntry", nil)
+ return nil, nil
+ }
+
+ info, _, err := git.Entries([]*git.TreeEntry{entry}).GetCommitsInfo(ctx, ctx.Repo.Commit, path.Dir("/" + ctx.Repo.TreePath)[1:])
+ if err != nil {
+ ctx.ServerError("GetCommitsInfo", err)
+ return nil, nil
+ }
+
+ if len(info) == 1 {
+		// Use the latest commit time as the Last-Modified value for HTTP caching
+ lastModified = &info[0].Commit.Committer.When
+ }
+ blob = entry.Blob()
+
+ return blob, lastModified
+}
+
+// SingleDownload downloads a file by its repo path
+func SingleDownload(ctx *context.Context) {
+ blob, lastModified := getBlobForEntry(ctx)
+ if blob == nil {
+ return
+ }
+
+ if err := common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, lastModified); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
+
+// SingleDownloadOrLFS downloads a file by its repo path, redirecting to LFS if necessary
+func SingleDownloadOrLFS(ctx *context.Context) {
+ blob, lastModified := getBlobForEntry(ctx)
+ if blob == nil {
+ return
+ }
+
+ if err := ServeBlobOrLFS(ctx, blob, lastModified); err != nil {
+ ctx.ServerError("ServeBlobOrLFS", err)
+ }
+}
+
+// DownloadByID downloads a file by its SHA-1 ID
+func DownloadByID(ctx *context.Context) {
+ blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlob", nil)
+ } else {
+ ctx.ServerError("GetBlob", err)
+ }
+ return
+ }
+ if err = common.ServeBlob(ctx.Base, ctx.Repo.TreePath, blob, nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
+
+// DownloadByIDOrLFS downloads a file by its SHA-1 ID, taking account of LFS
+func DownloadByIDOrLFS(ctx *context.Context) {
+ blob, err := ctx.Repo.GitRepo.GetBlob(ctx.Params("sha"))
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlob", nil)
+ } else {
+ ctx.ServerError("GetBlob", err)
+ }
+ return
+ }
+ if err = ServeBlobOrLFS(ctx, blob, nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+}
diff --git a/routers/web/repo/editor.go b/routers/web/repo/editor.go
new file mode 100644
index 0000000..00c3d88
--- /dev/null
+++ b/routers/web/repo/editor.go
@@ -0,0 +1,962 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "io"
+ "net/http"
+ "path"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ files_service "code.gitea.io/gitea/services/repository/files"
+)
+
+const (
+ tplEditFile base.TplName = "repo/editor/edit"
+ tplEditDiffPreview base.TplName = "repo/editor/diff_preview"
+ tplDeleteFile base.TplName = "repo/editor/delete"
+ tplUploadFile base.TplName = "repo/editor/upload"
+
+ frmCommitChoiceDirect string = "direct"
+ frmCommitChoiceNewBranch string = "commit-to-new-branch"
+)
+
+func canCreateBasePullRequest(ctx *context.Context) bool {
+ baseRepo := ctx.Repo.Repository.BaseRepo
+ return baseRepo != nil && baseRepo.UnitEnabled(ctx, unit.TypePullRequests)
+}
+
+func renderCommitRights(ctx *context.Context) bool {
+ canCommitToBranch, err := ctx.Repo.CanCommitToBranch(ctx, ctx.Doer)
+ if err != nil {
+ log.Error("CanCommitToBranch: %v", err)
+ }
+ ctx.Data["CanCommitToBranch"] = canCommitToBranch
+ ctx.Data["CanCreatePullRequest"] = ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) || canCreateBasePullRequest(ctx)
+
+ return canCommitToBranch.CanCommitToBranch
+}
+
+// redirectForCommitChoice redirects after committing the edit to a branch
+func redirectForCommitChoice(ctx *context.Context, commitChoice, newBranchName, treePath string) {
+ if commitChoice == frmCommitChoiceNewBranch {
+ // Redirect to a pull request when possible
+ redirectToPullRequest := false
+ repo := ctx.Repo.Repository
+ baseBranch := ctx.Repo.BranchName
+ headBranch := newBranchName
+ if repo.UnitEnabled(ctx, unit.TypePullRequests) {
+ redirectToPullRequest = true
+ } else if canCreateBasePullRequest(ctx) {
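+			// Pull requests are disabled on this repository but enabled on its base
+			// repository, so target the compare page of the base repository instead.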
+ redirectToPullRequest = true
+ baseBranch = repo.BaseRepo.DefaultBranch
+ headBranch = repo.Owner.Name + "/" + repo.Name + ":" + headBranch
+ repo = repo.BaseRepo
+ }
+
+ if redirectToPullRequest {
+ ctx.Redirect(repo.Link() + "/compare/" + util.PathEscapeSegments(baseBranch) + "..." + util.PathEscapeSegments(headBranch))
+ return
+ }
+ }
+
+ // Redirect to viewing file or folder
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(newBranchName) + "/" + util.PathEscapeSegments(treePath))
+}
+
+// getParentTreeFields returns the list of parent tree names and corresponding
+// tree paths based on the given tree path.
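+// For example, "a/b/c" yields treeNames ["a", "b", "c"] and treePaths
+// ["a", "a/b", "a/b/c"].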
+func getParentTreeFields(treePath string) (treeNames, treePaths []string) {
+ if len(treePath) == 0 {
+ return treeNames, treePaths
+ }
+
+ treeNames = strings.Split(treePath, "/")
+ treePaths = make([]string, len(treeNames))
+ for i := range treeNames {
+ treePaths[i] = strings.Join(treeNames[:i+1], "/")
+ }
+ return treeNames, treePaths
+}
+
+// getSelectableEmailAddresses returns which emails can be used by the user as
+// the email for a Git committer.
+func getSelectableEmailAddresses(ctx *context.Context) ([]*user_model.ActivatedEmailAddress, error) {
+	// Retrieve emails that the user could use for the committer identity.
+ commitEmails, err := user_model.GetActivatedEmailAddresses(ctx, ctx.Doer.ID)
+ if err != nil {
+ return nil, fmt.Errorf("GetActivatedEmailAddresses: %w", err)
+ }
+
+	// Allow the placeholder mail to be used. Use -1 as the ID to identify
+	// this entry as the placeholder mail of the user.
+ placeholderMail := &user_model.ActivatedEmailAddress{ID: -1, Email: ctx.Doer.GetPlaceholderEmail()}
+ if ctx.Doer.KeepEmailPrivate {
+ commitEmails = append([]*user_model.ActivatedEmailAddress{placeholderMail}, commitEmails...)
+ } else {
+ commitEmails = append(commitEmails, placeholderMail)
+ }
+
+ return commitEmails, nil
+}
+
+// CommonEditorData sets common context data that is used by the editor.
+func CommonEditorData(ctx *context.Context) {
+ // Set context for selectable email addresses.
+ commitEmails, err := getSelectableEmailAddresses(ctx)
+ if err != nil {
+ ctx.ServerError("getSelectableEmailAddresses", err)
+ return
+ }
+ ctx.Data["CommitMails"] = commitEmails
+ ctx.Data["DefaultCommitMail"] = ctx.Doer.GetEmail()
+}
+
+func editFile(ctx *context.Context, isNewFile bool) {
+ ctx.Data["PageIsEdit"] = true
+ ctx.Data["IsNewFile"] = isNewFile
+ canCommit := renderCommitRights(ctx)
+
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if treePath != ctx.Repo.TreePath {
+ if isNewFile {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_new", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ } else {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_edit", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ }
+ return
+ }
+
+ // Check if the filename (and additional path) is specified in the querystring
+ // (filename is a misnomer, but kept for compatibility with GitHub)
+ filePath, fileName := path.Split(ctx.Req.URL.Query().Get("filename"))
+ filePath = strings.Trim(filePath, "/")
+ treeNames, treePaths := getParentTreeFields(path.Join(ctx.Repo.TreePath, filePath))
+
+ if !isNewFile {
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+ // No way to edit a directory online.
+ if entry.IsDir() {
+ ctx.NotFound("entry.IsDir", nil)
+ return
+ }
+
+ blob := entry.Blob()
+ if blob.Size() >= setting.UI.MaxDisplayFileSize {
+ ctx.NotFound("blob.Size", err)
+ return
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ ctx.NotFound("blob.Data", err)
+ return
+ }
+
+ defer dataRc.Close()
+
+ ctx.Data["FileSize"] = blob.Size()
+ ctx.Data["FileName"] = blob.Name()
+
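+		// Sniff at most the first 1 KiB to decide whether the file is representable
+		// as text and can therefore be edited online.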
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ // Only some file types are editable online as text.
+ if !typesniffer.DetectContentType(buf).IsRepresentableAsText() {
+ ctx.NotFound("typesniffer.IsRepresentableAsText", nil)
+ return
+ }
+
+ d, _ := io.ReadAll(dataRc)
+
+ buf = append(buf, d...)
+ if content, err := charset.ToUTF8(buf, charset.ConvertOpts{KeepBOM: true}); err != nil {
+ log.Error("ToUTF8: %v", err)
+ ctx.Data["FileContent"] = string(buf)
+ } else {
+ ctx.Data["FileContent"] = content
+ }
+ } else {
+		// Append the filename from the query, or an empty string to let the user name the new file.
+ treeNames = append(treeNames, fileName)
+ }
+
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["PreviewableExtensions"] = strings.Join(markup.PreviewableExtensions(), ",")
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["EditorconfigJson"] = GetEditorConfig(ctx, treePath)
+
+ ctx.HTML(http.StatusOK, tplEditFile)
+}
+
+// GetEditorConfig returns an editorconfig JSON string for the given treePath, or "null"
+func GetEditorConfig(ctx *context.Context, treePath string) string {
+ ec, _, err := ctx.Repo.GetEditorconfig()
+ if err == nil {
+ def, err := ec.GetDefinitionForFilename(treePath)
+ if err == nil {
+ jsonStr, _ := json.Marshal(def)
+ return string(jsonStr)
+ }
+ }
+ return "null"
+}
+
+// EditFile renders the edit file page
+func EditFile(ctx *context.Context) {
+ editFile(ctx, false)
+}
+
+// NewFile renders the create file page
+func NewFile(ctx *context.Context) {
+ editFile(ctx, true)
+}
+
+func editFilePost(ctx *context.Context, form forms.EditRepoFileForm, isNewFile bool) {
+ canCommit := renderCommitRights(ctx)
+ treeNames, treePaths := getParentTreeFields(form.TreePath)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ ctx.Data["PageIsEdit"] = true
+ ctx.Data["PageHasPosted"] = true
+ ctx.Data["IsNewFile"] = isNewFile
+ ctx.Data["TreePath"] = form.TreePath
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ ctx.Data["FileContent"] = form.Content
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["PreviewableExtensions"] = strings.Join(markup.PreviewableExtensions(), ",")
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["EditorconfigJson"] = GetEditorConfig(ctx, form.TreePath)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplEditFile)
+ return
+ }
+
+	// Cannot commit to an existing branch if the user doesn't have the rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplEditFile, &form)
+ return
+ }
+
+	// CommitSummary is optional in the web form; if empty, give it a default message based on add or update
+ // `message` will be both the summary and message combined
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ if isNewFile {
+ message = ctx.Locale.TrString("repo.editor.add", form.TreePath)
+ } else {
+ message = ctx.Locale.TrString("repo.editor.update", form.TreePath)
+ }
+ }
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ operation := "update"
+ if isNewFile {
+ operation = "create"
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplEditFile, form)
+ if ctx.Written() {
+ return
+ }
+
+ if _, err := files_service.ChangeRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.ChangeRepoFilesOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Files: []*files_service.ChangeRepoFile{
+ {
+ Operation: operation,
+ FromTreePath: ctx.Repo.TreePath,
+ TreePath: form.TreePath,
+ ContentReader: strings.NewReader(strings.ReplaceAll(form.Content, "\r", "")),
+ },
+ },
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+ // This is where we handle all the errors thrown by files_service.ChangeRepoFiles
+ if git.IsErrNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_editing_no_longer_exists", ctx.Repo.TreePath), tplEditFile, &form)
+ } else if git_model.IsErrLFSFileLocked(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.upload_file_is_locked", err.(git_model.ErrLFSFileLocked).Path, err.(git_model.ErrLFSFileLocked).UserName), tplEditFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", form.TreePath), tplEditFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ if fileErr, ok := err.(models.ErrFilePathInvalid); ok {
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplEditFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplEditFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplEditFile, &form)
+ default:
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrRepoFileAlreadyExists(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_already_exists", form.TreePath), tplEditFile, &form)
+ } else if git.IsErrBranchNotExist(err) {
+		// For when a user adds/updates a file on a branch that no longer exists
+ if branchErr, ok := err.(git.ErrBranchNotExist); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplEditFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ ctx.Data["Err_NewBranchName"] = true
+ if branchErr, ok := err.(git_model.ErrBranchAlreadyExists); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplEditFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.commit_id_not_matching"), tplEditFile, &form)
+ } else if git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_out_of_date"), tplEditFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplEditFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("editFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplEditFile, &form)
+ }
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.fail_to_update_file", form.TreePath),
+ "Summary": ctx.Tr("repo.editor.fail_to_update_file_summary"),
+ "Details": utils.SanitizeFlashErrorString(err.Error()),
+ })
+ if err != nil {
+ ctx.ServerError("editFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplEditFile, &form)
+ }
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
+ _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
+}
+
+// EditFilePost handles the POST request for editing a file
+func EditFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+ editFilePost(ctx, *form, false)
+}
+
+// NewFilePost handles the POST request for creating a file
+func NewFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+ editFilePost(ctx, *form, true)
+}
+
+// DiffPreviewPost renders the diff preview page
+func DiffPreviewPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditPreviewDiffForm)
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if len(treePath) == 0 {
+ ctx.Error(http.StatusInternalServerError, "file name to diff is invalid")
+ return
+ }
+
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(treePath)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetTreeEntryByPath: "+err.Error())
+ return
+ } else if entry.IsDir() {
+ ctx.Error(http.StatusUnprocessableEntity)
+ return
+ }
+
+ diff, err := files_service.GetDiffPreview(ctx, ctx.Repo.Repository, ctx.Repo.BranchName, treePath, form.Content)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "GetDiffPreview: "+err.Error())
+ return
+ }
+
+ if diff.NumFiles == 0 {
+ ctx.PlainText(http.StatusOK, ctx.Locale.TrString("repo.editor.no_changes_to_show"))
+ return
+ }
+ ctx.Data["File"] = diff.Files[0]
+
+ ctx.HTML(http.StatusOK, tplEditDiffPreview)
+}
+
+// DeleteFile renders the delete file page
+func DeleteFile(ctx *context.Context) {
+ ctx.Data["PageIsDelete"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+
+ if treePath != ctx.Repo.TreePath {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_delete", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ return
+ }
+
+ ctx.Data["TreePath"] = treePath
+ canCommit := renderCommitRights(ctx)
+
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+
+ ctx.HTML(http.StatusOK, tplDeleteFile)
+}
+
+// DeleteFilePost handles the POST request for deleting a file
+func DeleteFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.DeleteRepoFileForm)
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ ctx.Data["PageIsDelete"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["TreePath"] = ctx.Repo.TreePath
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplDeleteFile)
+ return
+ }
+
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplDeleteFile, &form)
+ return
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ message = ctx.Locale.TrString("repo.editor.delete", ctx.Repo.TreePath)
+ }
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplDeleteFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ if _, err := files_service.ChangeRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.ChangeRepoFilesOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Files: []*files_service.ChangeRepoFile{
+ {
+ Operation: "delete",
+ TreePath: ctx.Repo.TreePath,
+ },
+ },
+ Message: message,
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+		// This is where we handle all the errors thrown by files_service.ChangeRepoFiles
+ if git.IsErrNotExist(err) || models.IsErrRepoFileDoesNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_deleting_no_longer_exists", ctx.Repo.TreePath), tplDeleteFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", ctx.Repo.TreePath), tplDeleteFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ if fileErr, ok := err.(models.ErrFilePathInvalid); ok {
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplDeleteFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplDeleteFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplDeleteFile, &form)
+ default:
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ } else {
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ } else if git.IsErrBranchNotExist(err) {
+		// For when a user deletes a file from a branch that no longer exists
+ if branchErr, ok := err.(git.ErrBranchNotExist); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplDeleteFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ if branchErr, ok := err.(git_model.ErrBranchAlreadyExists); ok {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplDeleteFile, &form)
+ } else {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrCommitIDDoesNotMatch(err) || git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_deleting", ctx.Repo.RepoLink+"/compare/"+util.PathEscapeSegments(form.LastCommit)+"..."+util.PathEscapeSegments(ctx.Repo.CommitID)), tplDeleteFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplDeleteFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("DeleteFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplDeleteFile, &form)
+ }
+ } else {
+ ctx.ServerError("DeleteRepoFile", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.editor.file_delete_success", ctx.Repo.TreePath))
+ treePath := path.Dir(ctx.Repo.TreePath)
+ if treePath == "." {
+ treePath = "" // the file deleted was in the root, so we return the user to the root directory
+ }
+ if len(treePath) > 0 {
+ // Need to get the latest commit since it changed
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
+ if err == nil && commit != nil {
+			// We have the commit, now find what directory we can return the user to
+ // (must have entries)
+ treePath = GetClosestParentWithFiles(treePath, commit)
+ } else {
+ treePath = "" // otherwise return them to the root of the repo
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, treePath)
+}
+
+// UploadFile renders the upload file page
+func UploadFile(ctx *context.Context) {
+ ctx.Data["PageIsUpload"] = true
+ upload.AddUploadContext(ctx, "repo")
+ canCommit := renderCommitRights(ctx)
+ treePath := cleanUploadFileName(ctx.Repo.TreePath)
+ if treePath != ctx.Repo.TreePath {
+ ctx.Redirect(path.Join(ctx.Repo.RepoLink, "_upload", util.PathEscapeSegments(ctx.Repo.BranchName), util.PathEscapeSegments(treePath)))
+ return
+ }
+ ctx.Repo.TreePath = treePath
+
+ treeNames, treePaths := getParentTreeFields(ctx.Repo.TreePath)
+ if len(treeNames) == 0 {
+		// We must have at least one element for the user to input.
+ treeNames = []string{""}
+ }
+
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+
+ ctx.HTML(http.StatusOK, tplUploadFile)
+}
+
+// UploadFilePost handles the POST request for uploading files
+func UploadFilePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UploadRepoFileForm)
+ ctx.Data["PageIsUpload"] = true
+ upload.AddUploadContext(ctx, "repo")
+ canCommit := renderCommitRights(ctx)
+
+ oldBranchName := ctx.Repo.BranchName
+ branchName := oldBranchName
+
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+
+ form.TreePath = cleanUploadFileName(form.TreePath)
+
+ treeNames, treePaths := getParentTreeFields(form.TreePath)
+ if len(treeNames) == 0 {
+		// We must have at least one element for the user to input.
+ treeNames = []string{""}
+ }
+
+ ctx.Data["TreePath"] = form.TreePath
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["TreePaths"] = treePaths
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/branch/" + util.PathEscapeSegments(branchName)
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = branchName
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplUploadFile)
+ return
+ }
+
+ if oldBranchName != branchName {
+ if _, err := ctx.Repo.GitRepo.GetBranch(branchName); err == nil {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchName), tplUploadFile, &form)
+ return
+ }
+ } else if !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplUploadFile, &form)
+ return
+ }
+
+ if !ctx.Repo.Repository.IsEmpty {
+ var newTreePath string
+ for _, part := range treeNames {
+ newTreePath = path.Join(newTreePath, part)
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(newTreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ break // Means there is no item with that name, so we're good
+ }
+ ctx.ServerError("Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+			// Users can only upload files into a directory; the directory name must not be an existing file.
+ if !entry.IsDir() {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", part), tplUploadFile, &form)
+ return
+ }
+ }
+ }
+
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ dir := form.TreePath
+ if dir == "" {
+ dir = "/"
+ }
+ message = ctx.Locale.TrString("repo.editor.upload_files_to_dir", dir)
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+ gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplUploadFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ if err := files_service.UploadRepoFiles(ctx, ctx.Repo.Repository, ctx.Doer, &files_service.UploadRepoFileOptions{
+ LastCommitID: ctx.Repo.CommitID,
+ OldBranch: oldBranchName,
+ NewBranch: branchName,
+ TreePath: form.TreePath,
+ Message: message,
+ Files: form.Files,
+ Signoff: form.Signoff,
+ Author: gitIdentity,
+ Committer: gitIdentity,
+ }); err != nil {
+ if git_model.IsErrLFSFileLocked(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.upload_file_is_locked", err.(git_model.ErrLFSFileLocked).Path, err.(git_model.ErrLFSFileLocked).UserName), tplUploadFile, &form)
+ } else if models.IsErrFilenameInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_invalid", form.TreePath), tplUploadFile, &form)
+ } else if models.IsErrFilePathInvalid(err) {
+ ctx.Data["Err_TreePath"] = true
+ fileErr := err.(models.ErrFilePathInvalid)
+ switch fileErr.Type {
+ case git.EntryModeSymlink:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_is_a_symlink", fileErr.Path), tplUploadFile, &form)
+ case git.EntryModeTree:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.filename_is_a_directory", fileErr.Path), tplUploadFile, &form)
+ case git.EntryModeBlob:
+ ctx.RenderWithErr(ctx.Tr("repo.editor.directory_is_a_file", fileErr.Path), tplUploadFile, &form)
+ default:
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ } else if models.IsErrRepoFileAlreadyExists(err) {
+ ctx.Data["Err_TreePath"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_already_exists", form.TreePath), tplUploadFile, &form)
+ } else if git.IsErrBranchNotExist(err) {
+ branchErr := err.(git.ErrBranchNotExist)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_does_not_exist", branchErr.Name), tplUploadFile, &form)
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ // For when a user specifies a new branch that already exists
+ ctx.Data["Err_NewBranchName"] = true
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplUploadFile, &form)
+ } else if git.IsErrPushOutOfDate(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+util.PathEscapeSegments(ctx.Repo.CommitID)+"..."+util.PathEscapeSegments(form.NewBranchName)), tplUploadFile, &form)
+ } else if git.IsErrPushRejected(err) {
+ errPushRej := err.(*git.ErrPushRejected)
+ if len(errPushRej.Message) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.push_rejected_no_message"), tplUploadFile, &form)
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.push_rejected"),
+ "Summary": ctx.Tr("repo.editor.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(errPushRej.Message),
+ })
+ if err != nil {
+ ctx.ServerError("UploadFilePost.HTMLString", err)
+ return
+ }
+ ctx.RenderWithErr(flashError, tplUploadFile, &form)
+ }
+ } else {
+ // os.ErrNotExist - upload file missing in the intervening time?!
+ log.Error("Error during upload to repo: %-v to filepath: %s on %s from %s: %v", ctx.Repo.Repository, form.TreePath, oldBranchName, form.NewBranchName, err)
+ ctx.RenderWithErr(ctx.Tr("repo.editor.unable_to_upload_files", form.TreePath, err), tplUploadFile, &form)
+ }
+ return
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ if isEmpty, err := ctx.Repo.GitRepo.IsEmpty(); err == nil && !isEmpty {
+ _ = repo_model.UpdateRepositoryCols(ctx, &repo_model.Repository{ID: ctx.Repo.Repository.ID, IsEmpty: false}, "is_empty")
+ }
+ }
+
+ redirectForCommitChoice(ctx, form.CommitChoice, branchName, form.TreePath)
+}
+
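+// cleanUploadFileName normalizes an uploaded file path and returns "" if any path component is ".git".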
+func cleanUploadFileName(name string) string {
+ // Normalize the path, resolving any ".." segments and leading separators.
+ name = util.PathJoinRel(name)
+ // Git does not allow any path component to be named ".git".
+ for _, part := range strings.Split(name, "/") {
+ if strings.ToLower(part) == ".git" {
+ return ""
+ }
+ }
+ return name
+}
+
+// UploadFileToServer uploads a file to the server's upload directory, not to the Git repository.
+func UploadFileToServer(ctx *context.Context) {
+ file, header, err := ctx.Req.FormFile("file")
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("FormFile: %v", err))
+ return
+ }
+ defer file.Close()
+
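+ // Read at most the first 1 KiB of the file; that is enough for upload.Verify to check the content type against the allowed types.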
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(file, buf)
+ if n > 0 {
+ buf = buf[:n]
+ }
+
+ err = upload.Verify(buf, header.Filename, setting.Repository.Upload.AllowedTypes)
+ if err != nil {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ return
+ }
+
+ name := cleanUploadFileName(header.Filename)
+ if len(name) == 0 {
+ ctx.Error(http.StatusInternalServerError, "Upload file name is invalid")
+ return
+ }
+
+ upload, err := repo_model.NewUpload(ctx, name, buf, file)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("NewUpload: %v", err))
+ return
+ }
+
+ log.Trace("New file uploaded: %s", upload.UUID)
+ ctx.JSON(http.StatusOK, map[string]string{
+ "uuid": upload.UUID,
+ })
+}
+
+// RemoveUploadFileFromServer removes a previously uploaded file from the server's upload directory.
+func RemoveUploadFileFromServer(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RemoveUploadFileForm)
+ if len(form.File) == 0 {
+ ctx.Status(http.StatusNoContent)
+ return
+ }
+
+ if err := repo_model.DeleteUploadByUUID(ctx, form.File); err != nil {
+ ctx.Error(http.StatusInternalServerError, fmt.Sprintf("DeleteUploadByUUID: %v", err))
+ return
+ }
+
+ log.Trace("Upload file removed: %s", form.File)
+ ctx.Status(http.StatusNoContent)
+}
+
+// GetUniquePatchBranchName returns a unique branch name for a new patch branch.
+// The name has the form <username>-patch-<num>, where <num> is the lowest number
+// for which no such branch exists yet. If 1000 attempts are exceeded or an error occurs,
+// it returns "" so the user has to type in the branch name themselves (the field will be empty).
+func GetUniquePatchBranchName(ctx *context.Context) string {
+ prefix := ctx.Doer.LowerName + "-patch-"
+ for i := 1; i <= 1000; i++ {
+ branchName := fmt.Sprintf("%s%d", prefix, i)
+ if _, err := ctx.Repo.GitRepo.GetBranch(branchName); err != nil {
+ if git.IsErrBranchNotExist(err) {
+ return branchName
+ }
+ log.Error("GetUniquePatchBranchName: %v", err)
+ return ""
+ }
+ }
+ return ""
+}
+
+// GetClosestParentWithFiles recursively finds the closest parent directory in a tree that still contains files
+// (used when a file in a tree is deleted). It returns "" for the root if no parent other than the root has files.
+// If the given treePath is not a subtree or has no entries, we go up one directory and try to return the user to that listing.
+func GetClosestParentWithFiles(treePath string, commit *git.Commit) string {
+ if len(treePath) == 0 || treePath == "." {
+ return ""
+ }
+ // see if the tree has entries
+ if tree, err := commit.SubTree(treePath); err != nil {
+ // failed to get tree, going up a dir
+ return GetClosestParentWithFiles(path.Dir(treePath), commit)
+ } else if entries, err := tree.ListEntries(); err != nil || len(entries) == 0 {
+ // no files in this dir, going up a dir
+ return GetClosestParentWithFiles(path.Dir(treePath), commit)
+ }
+ return treePath
+}
+
+// getGitIdentity returns the Git identity that should be used for a Git operation,
+// taking the user's chosen commit email into account.
+func getGitIdentity(ctx *context.Context, commitMailID int64, tpl base.TplName, form any) *files_service.IdentityOptions {
+ gitIdentity := &files_service.IdentityOptions{
+ Name: ctx.Doer.Name,
+ }
+
+ // -1 is defined as the placeholder email.
+ if commitMailID == -1 {
+ gitIdentity.Email = ctx.Doer.GetPlaceholderEmail()
+ } else {
+ // Check if the given email is activated.
+ email, err := user_model.GetEmailAddressByID(ctx, ctx.Doer.ID, commitMailID)
+ if err != nil {
+ ctx.ServerError("GetEmailAddressByID", err)
+ return nil
+ }
+
+ if email == nil || !email.IsActivated {
+ ctx.Data["Err_CommitMailID"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.invalid_commit_mail"), tpl, form)
+ return nil
+ }
+
+ gitIdentity.Email = email.Email
+ }
+
+ return gitIdentity
+}
diff --git a/routers/web/repo/editor_test.go b/routers/web/repo/editor_test.go
new file mode 100644
index 0000000..4d565b5
--- /dev/null
+++ b/routers/web/repo/editor_test.go
@@ -0,0 +1,73 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCleanUploadName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ kases := map[string]string{
+ ".git/refs/master": "",
+ "/root/abc": "root/abc",
+ "./../../abc": "abc",
+ "a/../.git": "",
+ "a/../../../abc": "abc",
+ "../../../acd": "acd",
+ "../../.git/abc": "",
+ "..\\..\\.git/abc": "..\\..\\.git/abc",
+ "..\\../.git/abc": "",
+ "..\\../.git": "",
+ "abc/../def": "def",
+ ".drone.yml": ".drone.yml",
+ ".abc/def/.drone.yml": ".abc/def/.drone.yml",
+ "..drone.yml.": "..drone.yml.",
+ "..a.dotty...name...": "..a.dotty...name...",
+ "..a.dotty../.folder../.name...": "..a.dotty../.folder../.name...",
+ }
+ for k, v := range kases {
+ assert.EqualValues(t, cleanUploadFileName(k), v)
+ }
+}
+
+func TestGetUniquePatchBranchName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1")
+ ctx.SetParams(":id", "1")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+
+ expectedBranchName := "user2-patch-1"
+ branchName := GetUniquePatchBranchName(ctx)
+ assert.Equal(t, expectedBranchName, branchName)
+}
+
+func TestGetClosestParentWithFiles(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ branch := repo.DefaultBranch
+ gitRepo, _ := gitrepo.OpenRepository(git.DefaultContext, repo)
+ defer gitRepo.Close()
+ commit, _ := gitRepo.GetBranchCommit(branch)
+ var expectedTreePath string // Should return the root dir, empty string, since there are no subdirs in this repo
+ for _, deletedFile := range []string{
+ "dir1/dir2/dir3/file.txt",
+ "file.txt",
+ } {
+ treePath := GetClosestParentWithFiles(deletedFile, commit)
+ assert.Equal(t, expectedTreePath, treePath)
+ }
+}
diff --git a/routers/web/repo/find.go b/routers/web/repo/find.go
new file mode 100644
index 0000000..9da4237
--- /dev/null
+++ b/routers/web/repo/find.go
@@ -0,0 +1,24 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplFindFiles base.TplName = "repo/find/files"
+)
+
+// FindFiles render the page to find repository files
+func FindFiles(ctx *context.Context) {
+ path := ctx.Params("*")
+ ctx.Data["TreeLink"] = ctx.Repo.RepoLink + "/src/" + util.PathEscapeSegments(path)
+ ctx.Data["DataLink"] = ctx.Repo.RepoLink + "/tree-list/" + util.PathEscapeSegments(path)
+ ctx.HTML(http.StatusOK, tplFindFiles)
+}
diff --git a/routers/web/repo/flags/manage.go b/routers/web/repo/flags/manage.go
new file mode 100644
index 0000000..377a5c2
--- /dev/null
+++ b/routers/web/repo/flags/manage.go
@@ -0,0 +1,49 @@
+// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package flags
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplRepoFlags base.TplName = "repo/flags"
+)
+
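+// Manage renders the repository flags page, listing every settable flag and whether it is currently enabled for the repository.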
+func Manage(ctx *context.Context) {
+ ctx.Data["IsRepoFlagsPage"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.admin.manage_flags")
+
+ flags := map[string]bool{}
+ for _, f := range setting.Repository.SettableFlags {
+ flags[f] = false
+ }
+ repoFlags, _ := ctx.Repo.Repository.ListFlags(ctx)
+ for _, f := range repoFlags {
+ flags[f.Name] = true
+ }
+
+ ctx.Data["Flags"] = flags
+
+ ctx.HTML(http.StatusOK, tplRepoFlags)
+}
+
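+// ManagePost replaces the repository's flags with the submitted selection.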
+func ManagePost(ctx *context.Context) {
+ newFlags := ctx.FormStrings("flags")
+
+ err := ctx.Repo.Repository.ReplaceAllFlags(ctx, newFlags)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.admin.failed_to_replace_flags"))
+ log.Error("Error replacing repository flags for repo %d: %v", ctx.Repo.Repository.ID, err)
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.admin.flags_replaced"))
+ }
+
+ ctx.Redirect(ctx.Repo.Repository.HTMLURL() + "/flags")
+}
diff --git a/routers/web/repo/githttp.go b/routers/web/repo/githttp.go
new file mode 100644
index 0000000..a082498
--- /dev/null
+++ b/routers/web/repo/githttp.go
@@ -0,0 +1,599 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "compress/gzip"
+ gocontext "context"
+ "fmt"
+ "net/http"
+ "os"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "sync"
+ "time"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/go-chi/cors"
+)
+
+func HTTPGitEnabledHandler(ctx *context.Context) {
+ if setting.Repository.DisableHTTPGit {
+ ctx.Resp.WriteHeader(http.StatusForbidden)
+ _, _ = ctx.Resp.Write([]byte("Interacting with repositories by HTTP protocol is not allowed"))
+ }
+}
+
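+// CorsHandler returns a CORS middleware restricted to the configured Access-Control-Allow-Origin, or a pass-through middleware when no origin is configured.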
+func CorsHandler() func(next http.Handler) http.Handler {
+ if setting.Repository.AccessControlAllowOrigin != "" {
+ return cors.Handler(cors.Options{
+ AllowedOrigins: []string{setting.Repository.AccessControlAllowOrigin},
+ AllowedHeaders: []string{"Content-Type", "Authorization", "User-Agent"},
+ })
+ }
+ return func(next http.Handler) http.Handler {
+ return next
+ }
+}
+
+// httpBase implements the common part of the Git Smart HTTP protocol: it resolves the target
+// repository and checks authentication and authorization before returning a serviceHandler.
+func httpBase(ctx *context.Context) *serviceHandler {
+ username := ctx.Params(":username")
+ reponame := strings.TrimSuffix(ctx.Params(":reponame"), ".git")
+
+ if ctx.FormString("go-get") == "1" {
+ context.EarlyResponseForGoGetMeta(ctx)
+ return nil
+ }
+
+ var isPull, receivePack bool
+ service := ctx.FormString("service")
+ if service == "git-receive-pack" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-receive-pack") {
+ isPull = false
+ receivePack = true
+ } else if service == "git-upload-pack" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-upload-pack") {
+ isPull = true
+ } else if service == "git-upload-archive" ||
+ strings.HasSuffix(ctx.Req.URL.Path, "git-upload-archive") {
+ isPull = true
+ } else {
+ isPull = ctx.Req.Method == "GET"
+ }
+
+ var accessMode perm.AccessMode
+ if isPull {
+ accessMode = perm.AccessModeRead
+ } else {
+ accessMode = perm.AccessModeWrite
+ }
+
+ isWiki := false
+ unitType := unit.TypeCode
+
+ if strings.HasSuffix(reponame, ".wiki") {
+ isWiki = true
+ unitType = unit.TypeWiki
+ reponame = reponame[:len(reponame)-5]
+ }
+
+ owner := ctx.ContextUser
+ if !owner.IsOrganization() && !owner.IsActive {
+ ctx.PlainText(http.StatusForbidden, "Repository cannot be accessed. You cannot push or open issues/pull-requests.")
+ return nil
+ }
+
+ repoExist := true
+ repo, err := repo_model.GetRepositoryByName(ctx, owner.ID, reponame)
+ if err != nil {
+ if !repo_model.IsErrRepoNotExist(err) {
+ ctx.ServerError("GetRepositoryByName", err)
+ return nil
+ }
+
+ if redirectRepoID, err := repo_model.LookupRedirect(ctx, owner.ID, reponame); err == nil {
+ context.RedirectToRepo(ctx.Base, redirectRepoID)
+ return nil
+ }
+ repoExist = false
+ }
+
+ // Don't allow pushing if the repo is archived
+ if repoExist && repo.IsArchived && !isPull {
+ ctx.PlainText(http.StatusForbidden, "This repo is archived. You can view files and clone it, but cannot push or open issues/pull-requests.")
+ return nil
+ }
+
+ // Only pulls from public repositories don't need auth.
+ isPublicPull := repoExist && !repo.IsPrivate && isPull
+ var (
+ askAuth = !isPublicPull || setting.Service.RequireSignInView
+ environ []string
+ )
+
+ // don't allow anonymous pulls if organization is not public
+ if isPublicPull {
+ if err := repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return nil
+ }
+
+ askAuth = askAuth || (repo.Owner.Visibility != structs.VisibleTypePublic)
+ }
+
+ // check access
+ if askAuth {
+ // rely on the results of Contexter
+ if !ctx.IsSigned {
+ // TODO: support digest auth - which would be an Authorization header with a digest
+ ctx.Resp.Header().Set("WWW-Authenticate", `Basic realm="Gitea"`)
+ ctx.Error(http.StatusUnauthorized)
+ return nil
+ }
+
+ context.CheckRepoScopedToken(ctx, repo, auth_model.GetScopeLevelFromAccessMode(accessMode))
+ if ctx.Written() {
+ return nil
+ }
+
+ if ctx.IsBasicAuth && ctx.Data["IsApiToken"] != true && ctx.Data["IsActionsToken"] != true {
+ _, err = auth_model.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if err == nil {
+ // TODO: This response should be changed to "invalid credentials" for security reasons once the expectation behind it (creating an app token to authenticate) is properly documented
+ ctx.PlainText(http.StatusUnauthorized, "Users with two-factor authentication enabled cannot perform HTTP/HTTPS operations via plain username and password. Please create and use a personal access token on the user settings page")
+ return nil
+ } else if !auth_model.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("IsErrTwoFactorNotEnrolled", err)
+ return nil
+ }
+ }
+
+ if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin {
+ ctx.PlainText(http.StatusForbidden, "Your account is disabled.")
+ return nil
+ }
+
+ environ = []string{
+ repo_module.EnvRepoUsername + "=" + username,
+ repo_module.EnvRepoName + "=" + reponame,
+ repo_module.EnvPusherName + "=" + ctx.Doer.Name,
+ repo_module.EnvPusherID + fmt.Sprintf("=%d", ctx.Doer.ID),
+ repo_module.EnvAppURL + "=" + setting.AppURL,
+ }
+
+ if repoExist {
+ // Because of the special ref "refs/for/...", the write permission check needs to be delayed.
+ if git.SupportProcReceive {
+ accessMode = perm.AccessModeRead
+ }
+
+ if ctx.Data["IsActionsToken"] == true {
+ taskID := ctx.Data["ActionsTaskID"].(int64)
+ task, err := actions_model.GetTaskByID(ctx, taskID)
+ if err != nil {
+ ctx.ServerError("GetTaskByID", err)
+ return nil
+ }
+ if task.RepoID != repo.ID {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+
+ if task.IsForkPullRequest {
+ if accessMode > perm.AccessModeRead {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+ environ = append(environ, fmt.Sprintf("%s=%d", repo_module.EnvActionPerm, perm.AccessModeRead))
+ } else {
+ if accessMode > perm.AccessModeWrite {
+ ctx.PlainText(http.StatusForbidden, "User permission denied")
+ return nil
+ }
+ environ = append(environ, fmt.Sprintf("%s=%d", repo_module.EnvActionPerm, perm.AccessModeWrite))
+ }
+ } else {
+ p, err := access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+
+ if !p.CanAccess(accessMode, unitType) {
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+ }
+
+ if !isPull && repo.IsMirror {
+ ctx.PlainText(http.StatusForbidden, "mirror repository is read-only")
+ return nil
+ }
+ }
+
+ if !ctx.Doer.KeepEmailPrivate {
+ environ = append(environ, repo_module.EnvPusherEmail+"="+ctx.Doer.Email)
+ }
+
+ if isWiki {
+ environ = append(environ, repo_module.EnvRepoIsWiki+"=true")
+ } else {
+ environ = append(environ, repo_module.EnvRepoIsWiki+"=false")
+ }
+ }
+
+ if !repoExist {
+ if !receivePack {
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+
+ if isWiki { // wiki operations cannot be performed before the repository is created
+ ctx.PlainText(http.StatusNotFound, "Repository not found")
+ return nil
+ }
+
+ if owner.IsOrganization() && !setting.Repository.EnablePushCreateOrg {
+ ctx.PlainText(http.StatusForbidden, "Push to create is not enabled for organizations.")
+ return nil
+ }
+ if !owner.IsOrganization() && !setting.Repository.EnablePushCreateUser {
+ ctx.PlainText(http.StatusForbidden, "Push to create is not enabled for users.")
+ return nil
+ }
+
+ // Return a dummy ref advertisement if this is a GET receive-pack request
+ if ctx.Req.Method == http.MethodGet {
+ dummyInfoRefs(ctx)
+ return nil
+ }
+
+ repo, err = repo_service.PushCreateRepo(ctx, ctx.Doer, owner, reponame)
+ if err != nil {
+ log.Error("pushCreateRepo: %v", err)
+ ctx.Status(http.StatusNotFound)
+ return nil
+ }
+ }
+
+ if isWiki {
+ // Ensure the wiki is enabled before we allow access to it
+ if _, err := repo.GetUnit(ctx, unit.TypeWiki); err != nil {
+ if repo_model.IsErrUnitTypeNotExist(err) {
+ ctx.PlainText(http.StatusForbidden, "repository wiki is disabled")
+ return nil
+ }
+ log.Error("Failed to get the wiki unit in %-v Error: %v", repo, err)
+ ctx.ServerError("GetUnit(UnitTypeWiki) for "+repo.FullName(), err)
+ return nil
+ }
+ }
+
+ environ = append(environ, repo_module.EnvRepoID+fmt.Sprintf("=%d", repo.ID))
+
+ ctx.Req.URL.Path = strings.ToLower(ctx.Req.URL.Path) // in case the repo name contains upper-case characters
+
+ return &serviceHandler{repo, isWiki, environ}
+}
+
+var (
+ infoRefsCache []byte
+ infoRefsOnce sync.Once
+)
+
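+// dummyInfoRefs advertises the refs of an empty repository (used for push-to-create); the advertisement is generated once in a temporary bare repository and cached for the lifetime of the process.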
+func dummyInfoRefs(ctx *context.Context) {
+ infoRefsOnce.Do(func() {
+ tmpDir, err := os.MkdirTemp(os.TempDir(), "gitea-info-refs-cache")
+ if err != nil {
+ log.Error("Failed to create temp dir for git-receive-pack cache: %v", err)
+ return
+ }
+
+ defer func() {
+ if err := util.RemoveAll(tmpDir); err != nil {
+ log.Error("RemoveAll: %v", err)
+ }
+ }()
+
+ if err := git.InitRepository(ctx, tmpDir, true, git.Sha1ObjectFormat.Name()); err != nil {
+ log.Error("Failed to init bare repo for git-receive-pack cache: %v", err)
+ return
+ }
+
+ refs, _, err := git.NewCommand(ctx, "receive-pack", "--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Dir: tmpDir})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
+ }
+
+ log.Debug("populating infoRefsCache: \n%s", string(refs))
+ infoRefsCache = refs
+ })
+
+ ctx.RespHeader().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT")
+ ctx.RespHeader().Set("Pragma", "no-cache")
+ ctx.RespHeader().Set("Cache-Control", "no-cache, max-age=0, must-revalidate")
+ ctx.RespHeader().Set("Content-Type", "application/x-git-receive-pack-advertisement")
+ _, _ = ctx.Write(packetWrite("# service=git-receive-pack\n"))
+ _, _ = ctx.Write([]byte("0000"))
+ _, _ = ctx.Write(infoRefsCache)
+}
+
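+// serviceHandler holds the resolved repository, whether the request targets its wiki, and the environment variables to pass to the git subprocess.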
+type serviceHandler struct {
+ repo *repo_model.Repository
+ isWiki bool
+ environ []string
+}
+
+func (h *serviceHandler) getRepoDir() string {
+ if h.isWiki {
+ return h.repo.WikiPath()
+ }
+ return h.repo.RepoPath()
+}
+
+func setHeaderNoCache(ctx *context.Context) {
+ ctx.Resp.Header().Set("Expires", "Fri, 01 Jan 1980 00:00:00 GMT")
+ ctx.Resp.Header().Set("Pragma", "no-cache")
+ ctx.Resp.Header().Set("Cache-Control", "no-cache, max-age=0, must-revalidate")
+}
+
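+// setHeaderCacheForever allows clients to cache the response for one year (31536000 seconds).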
+func setHeaderCacheForever(ctx *context.Context) {
+ now := time.Now().Unix()
+ expires := now + 31536000
+ ctx.Resp.Header().Set("Date", fmt.Sprintf("%d", now))
+ ctx.Resp.Header().Set("Expires", fmt.Sprintf("%d", expires))
+ ctx.Resp.Header().Set("Cache-Control", "public, max-age=31536000")
+}
+
+func containsParentDirectorySeparator(v string) bool {
+ if !strings.Contains(v, "..") {
+ return false
+ }
+ for _, ent := range strings.FieldsFunc(v, isSlashRune) {
+ if ent == ".." {
+ return true
+ }
+ }
+ return false
+}
+
+func isSlashRune(r rune) bool { return r == '/' || r == '\\' }
+
+func (h *serviceHandler) sendFile(ctx *context.Context, contentType, file string) {
+ if containsParentDirectorySeparator(file) {
+ log.Error("request file path contains invalid path: %v", file)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return
+ }
+ reqFile := filepath.Join(h.getRepoDir(), file)
+
+ fi, err := os.Stat(reqFile)
+ if os.IsNotExist(err) {
+ ctx.Resp.WriteHeader(http.StatusNotFound)
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", contentType)
+ ctx.Resp.Header().Set("Content-Length", fmt.Sprintf("%d", fi.Size()))
+ // http.TimeFormat requires a UTC time; refer to https://pkg.go.dev/net/http#TimeFormat
+ ctx.Resp.Header().Set("Last-Modified", fi.ModTime().UTC().Format(http.TimeFormat))
+ http.ServeFile(ctx.Resp, ctx.Req, reqFile)
+}
+
+// one or more key=value pairs separated by colons
+var safeGitProtocolHeader = regexp.MustCompile(`^[0-9a-zA-Z]+=[0-9a-zA-Z]+(:[0-9a-zA-Z]+=[0-9a-zA-Z]+)*$`)
+
+func prepareGitCmdWithAllowedService(ctx *context.Context, service string) (*git.Command, error) {
+ if service == "receive-pack" {
+ return git.NewCommand(ctx, "receive-pack"), nil
+ }
+ if service == "upload-pack" {
+ return git.NewCommand(ctx, "upload-pack"), nil
+ }
+
+ return nil, fmt.Errorf("service %q is not allowed", service)
+}
+
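+// serviceRPC runs a single stateless smart HTTP RPC (upload-pack or receive-pack), piping the (possibly gzip-compressed) request body into the git command and streaming its output back to the client.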
+func serviceRPC(ctx *context.Context, h *serviceHandler, service string) {
+ defer func() {
+ if err := ctx.Req.Body.Close(); err != nil {
+ log.Error("serviceRPC: Close: %v", err)
+ }
+ }()
+
+ expectedContentType := fmt.Sprintf("application/x-git-%s-request", service)
+ if ctx.Req.Header.Get("Content-Type") != expectedContentType {
+ log.Error("Content-Type (%q) doesn't match expected: %q", ctx.Req.Header.Get("Content-Type"), expectedContentType)
+ ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ return
+ }
+
+ cmd, err := prepareGitCmdWithAllowedService(ctx, service)
+ if err != nil {
+ log.Error("Failed to prepareGitCmdWithService: %v", err)
+ ctx.Resp.WriteHeader(http.StatusUnauthorized)
+ return
+ }
+
+ ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-result", service))
+
+ reqBody := ctx.Req.Body
+
+ // Handle GZIP.
+ if ctx.Req.Header.Get("Content-Encoding") == "gzip" {
+ reqBody, err = gzip.NewReader(reqBody)
+ if err != nil {
+ log.Error("Fail to create gzip reader: %v", err)
+ ctx.Resp.WriteHeader(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ // Set this so that the pre-receive and post-receive hooks can execute.
+ h.environ = append(h.environ, "SSH_ORIGINAL_COMMAND="+service)
+
+ if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
+ h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
+ }
+
+ var stderr bytes.Buffer
+ cmd.AddArguments("--stateless-rpc").AddDynamicArguments(h.getRepoDir())
+ cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.getRepoDir()))
+ if err := cmd.Run(&git.RunOpts{
+ Dir: h.getRepoDir(),
+ Env: append(os.Environ(), h.environ...),
+ Stdout: ctx.Resp,
+ Stdin: reqBody,
+ Stderr: &stderr,
+ UseContextTimeout: true,
+ }); err != nil {
+ if err.Error() != "signal: killed" {
+ log.Error("Fail to serve RPC(%s) in %s: %v - %s", service, h.getRepoDir(), err, stderr.String())
+ }
+ return
+ }
+}
+
+// ServiceUploadPack implements Git Smart HTTP protocol
+func ServiceUploadPack(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ serviceRPC(ctx, h, "upload-pack")
+ }
+}
+
+// ServiceReceivePack implements Git Smart HTTP protocol
+func ServiceReceivePack(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ serviceRPC(ctx, h, "receive-pack")
+ }
+}
+
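+// getServiceType returns the requested service from the "service" form value without its "git-" prefix, or "" if the value does not start with "git-".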
+func getServiceType(ctx *context.Context) string {
+ serviceType := ctx.Req.FormValue("service")
+ if !strings.HasPrefix(serviceType, "git-") {
+ return ""
+ }
+ return strings.TrimPrefix(serviceType, "git-")
+}
+
+func updateServerInfo(ctx gocontext.Context, dir string) []byte {
+ out, _, err := git.NewCommand(ctx, "update-server-info").RunStdBytes(&git.RunOpts{Dir: dir})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(out)))
+ }
+ return out
+}
+
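+// packetWrite encodes str as a Git pkt-line: the total length (payload plus the length prefix itself) in hexadecimal, left-padded with zeros to a multiple of four digits, followed by the payload.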
+func packetWrite(str string) []byte {
+ s := strconv.FormatInt(int64(len(str)+4), 16)
+ if len(s)%4 != 0 {
+ s = strings.Repeat("0", 4-len(s)%4) + s
+ }
+ return []byte(s + str)
+}
+
+// GetInfoRefs serves the ref advertisement: Smart HTTP when a supported service is requested, dumb HTTP otherwise
+func GetInfoRefs(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h == nil {
+ return
+ }
+ setHeaderNoCache(ctx)
+ service := getServiceType(ctx)
+ cmd, err := prepareGitCmdWithAllowedService(ctx, service)
+ if err == nil {
+ if protocol := ctx.Req.Header.Get("Git-Protocol"); protocol != "" && safeGitProtocolHeader.MatchString(protocol) {
+ h.environ = append(h.environ, "GIT_PROTOCOL="+protocol)
+ }
+ h.environ = append(os.Environ(), h.environ...)
+
+ refs, _, err := cmd.AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.getRepoDir()})
+ if err != nil {
+ log.Error(fmt.Sprintf("%v - %s", err, string(refs)))
+ }
+
+ ctx.Resp.Header().Set("Content-Type", fmt.Sprintf("application/x-git-%s-advertisement", service))
+ ctx.Resp.WriteHeader(http.StatusOK)
+ _, _ = ctx.Resp.Write(packetWrite("# service=git-" + service + "\n"))
+ _, _ = ctx.Resp.Write([]byte("0000"))
+ _, _ = ctx.Resp.Write(refs)
+ } else {
+ updateServerInfo(ctx, h.getRepoDir())
+ h.sendFile(ctx, "text/plain; charset=utf-8", "info/refs")
+ }
+}
+
+// GetTextFile implements Git dumb HTTP
+func GetTextFile(p string) func(*context.Context) {
+ return func(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderNoCache(ctx)
+ file := ctx.Params("file")
+ if file != "" {
+ h.sendFile(ctx, "text/plain", "objects/info/"+file)
+ } else {
+ h.sendFile(ctx, "text/plain", p)
+ }
+ }
+ }
+}
+
+// GetInfoPacks implements Git dumb HTTP
+func GetInfoPacks(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "text/plain; charset=utf-8", "objects/info/packs")
+ }
+}
+
+// GetLooseObject implements Git dumb HTTP
+func GetLooseObject(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-loose-object", fmt.Sprintf("objects/%s/%s",
+ ctx.Params("head"), ctx.Params("hash")))
+ }
+}
+
+// GetPackFile implements Git dumb HTTP
+func GetPackFile(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-packed-objects", "objects/pack/pack-"+ctx.Params("file")+".pack")
+ }
+}
+
+// GetIdxFile implements Git dumb HTTP
+func GetIdxFile(ctx *context.Context) {
+ h := httpBase(ctx)
+ if h != nil {
+ setHeaderCacheForever(ctx)
+ h.sendFile(ctx, "application/x-git-packed-objects-toc", "objects/pack/pack-"+ctx.Params("file")+".idx")
+ }
+}
diff --git a/routers/web/repo/githttp_test.go b/routers/web/repo/githttp_test.go
new file mode 100644
index 0000000..5ba8de3
--- /dev/null
+++ b/routers/web/repo/githttp_test.go
@@ -0,0 +1,42 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestContainsParentDirectorySeparator(t *testing.T) {
+ tests := []struct {
+ v string
+ b bool
+ }{
+ {
+ v: `user2/repo1/info/refs`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/HEAD`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/some.../strange_file...mp3`,
+ b: false,
+ },
+ {
+ v: `user2/repo1/../../custom/conf/app.ini`,
+ b: true,
+ },
+ {
+ v: `user2/repo1/objects/info/..\..\..\..\custom\conf\app.ini`,
+ b: true,
+ },
+ }
+
+ for i := range tests {
+ assert.EqualValues(t, tests[i].b, containsParentDirectorySeparator(tests[i].v))
+ }
+}
diff --git a/routers/web/repo/helper.go b/routers/web/repo/helper.go
new file mode 100644
index 0000000..5e1e116
--- /dev/null
+++ b/routers/web/repo/helper.go
@@ -0,0 +1,44 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/url"
+ "sort"
+
+ "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+)
+
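+// MakeSelfOnTop moves the doer to the front of the user list so that the current user appears first in selection menus.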
+func MakeSelfOnTop(doer *user.User, users []*user.User) []*user.User {
+ if doer != nil {
+ sort.Slice(users, func(i, j int) bool {
+ if users[i].ID == users[j].ID {
+ return false
+ }
+ return users[i].ID == doer.ID // if users[i] is self, put it before others, so less=true
+ })
+ }
+ return users
+}
+
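+// HandleGitError responds with a 404 (including a prompt pointing back to the current ref) when the requested Git object does not exist, and with a 500 for any other Git error.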
+func HandleGitError(ctx *context.Context, msg string, err error) {
+ if git.IsErrNotExist(err) {
+ refType := ""
+ switch {
+ case ctx.Repo.IsViewBranch:
+ refType = "branch"
+ case ctx.Repo.IsViewTag:
+ refType = "tag"
+ case ctx.Repo.IsViewCommit:
+ refType = "commit"
+ }
+ ctx.Data["NotFoundPrompt"] = ctx.Locale.Tr("repo.tree_path_not_found_"+refType, ctx.Repo.TreePath, url.PathEscape(ctx.Repo.RefName))
+ ctx.Data["NotFoundGoBackURL"] = ctx.Repo.RepoLink + "/src/" + refType + "/" + url.PathEscape(ctx.Repo.RefName)
+ ctx.NotFound(msg, err)
+ } else {
+ ctx.ServerError(msg, err)
+ }
+}
diff --git a/routers/web/repo/helper_test.go b/routers/web/repo/helper_test.go
new file mode 100644
index 0000000..978758e
--- /dev/null
+++ b/routers/web/repo/helper_test.go
@@ -0,0 +1,26 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/user"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMakeSelfOnTop(t *testing.T) {
+ users := MakeSelfOnTop(nil, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 2, users[0].ID)
+
+ users = MakeSelfOnTop(&user.User{ID: 1}, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 1, users[0].ID)
+
+ users = MakeSelfOnTop(&user.User{ID: 2}, []*user.User{{ID: 2}, {ID: 1}})
+ assert.Len(t, users, 2)
+ assert.EqualValues(t, 2, users[0].ID)
+}
diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go
new file mode 100644
index 0000000..5d13ccc
--- /dev/null
+++ b/routers/web/repo/issue.go
@@ -0,0 +1,3822 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ stdCtx "context"
+ "errors"
+ "fmt"
+ "html/template"
+ "math/big"
+ "net/http"
+ "net/url"
+ "slices"
+ "sort"
+ "strconv"
+ "strings"
+ "time"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ pull_model "code.gitea.io/gitea/models/pull"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/git"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ issue_template "code.gitea.io/gitea/modules/issue/template"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/templates/vars"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ issue_service "code.gitea.io/gitea/services/issue"
+ pull_service "code.gitea.io/gitea/services/pull"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "gitea.com/go-chi/binding"
+)
+
+const (
+ tplAttachment base.TplName = "repo/issue/view_content/attachments"
+
+ tplIssues base.TplName = "repo/issue/list"
+ tplIssueNew base.TplName = "repo/issue/new"
+ tplIssueChoose base.TplName = "repo/issue/choose"
+ tplIssueView base.TplName = "repo/issue/view"
+
+ tplReactions base.TplName = "repo/issue/view_content/reactions"
+
+ issueTemplateKey = "IssueTemplate"
+ issueTemplateTitleKey = "IssueTemplateTitle"
+)
+
+// IssueTemplateCandidates is the list of candidate file paths for issue templates
+var IssueTemplateCandidates = []string{
+ "ISSUE_TEMPLATE.md",
+ "ISSUE_TEMPLATE.yaml",
+ "ISSUE_TEMPLATE.yml",
+ "issue_template.md",
+ "issue_template.yaml",
+ "issue_template.yml",
+ ".forgejo/ISSUE_TEMPLATE.md",
+ ".forgejo/ISSUE_TEMPLATE.yaml",
+ ".forgejo/ISSUE_TEMPLATE.yml",
+ ".forgejo/issue_template.md",
+ ".forgejo/issue_template.yaml",
+ ".forgejo/issue_template.yml",
+ ".gitea/ISSUE_TEMPLATE.md",
+ ".gitea/ISSUE_TEMPLATE.yaml",
+ ".gitea/ISSUE_TEMPLATE.yml",
+ ".gitea/issue_template.md",
+ ".gitea/issue_template.yaml",
+ ".gitea/issue_template.yml",
+ ".github/ISSUE_TEMPLATE.md",
+ ".github/ISSUE_TEMPLATE.yaml",
+ ".github/ISSUE_TEMPLATE.yml",
+ ".github/issue_template.md",
+ ".github/issue_template.yaml",
+ ".github/issue_template.yml",
+}
+
+// MustAllowUserComment checks whether commenting on an issue is allowed.
+// If the issue is locked, only users who can write to the repository (or admins)
+// may comment; other users are blocked.
+func MustAllowUserComment(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin {
+ ctx.Flash.Error(ctx.Tr("repo.issues.comment_on_locked"))
+ ctx.Redirect(issue.Link())
+ return
+ }
+}
+
+// MustEnableIssues checks that the repository has an issue tracker enabled; if an external tracker is configured, the user is redirected to it
+func MustEnableIssues(ctx *context.Context) {
+ if !ctx.Repo.CanRead(unit.TypeIssues) &&
+ !ctx.Repo.CanRead(unit.TypeExternalTracker) {
+ ctx.NotFound("MustEnableIssues", nil)
+ return
+ }
+
+ unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil {
+ ctx.Redirect(unit.ExternalTrackerConfig().ExternalTrackerURL)
+ return
+ }
+}
+
+// MustAllowPulls checks that the repository has pull requests enabled and that the user is allowed to read them
+func MustAllowPulls(ctx *context.Context) {
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.NotFound("MustAllowPulls", nil)
+ return
+ }
+
+ // The user can send a pull request if they own a fork of this repository.
+ if ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID) {
+ ctx.Repo.PullRequest.Allowed = true
+ ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.Doer.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName)
+ }
+}
+
+func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption optional.Option[bool]) {
+ var err error
+ viewType := ctx.FormString("type")
+ sortType := ctx.FormString("sort")
+ types := []string{"all", "your_repositories", "assigned", "created_by", "mentioned", "review_requested", "reviewed_by"}
+ if !util.SliceContainsString(types, viewType, true) {
+ viewType = "all"
+ }
+
+ var (
+ assigneeID = ctx.FormInt64("assignee")
+ posterID = ctx.FormInt64("poster")
+ mentionedID int64
+ reviewRequestedID int64
+ reviewedID int64
+ )
+
+ if ctx.IsSigned {
+ switch viewType {
+ case "created_by":
+ posterID = ctx.Doer.ID
+ case "mentioned":
+ mentionedID = ctx.Doer.ID
+ case "assigned":
+ assigneeID = ctx.Doer.ID
+ case "review_requested":
+ reviewRequestedID = ctx.Doer.ID
+ case "reviewed_by":
+ reviewedID = ctx.Doer.ID
+ }
+ }
+
+ repo := ctx.Repo.Repository
+ var labelIDs []int64
+ // 1,-2 means including label 1 and excluding label 2
+ // 0 means issues with no label
+ // blank means issues are not filtered by label
+ selectLabels := ctx.FormString("labels")
+ if selectLabels == "" {
+ ctx.Data["AllLabels"] = true
+ } else if selectLabels == "0" {
+ ctx.Data["NoLabel"] = true
+ }
+ if len(selectLabels) > 0 {
+ labelIDs, err = base.StringsToInt64s(strings.Split(selectLabels, ","))
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("invalid_data", selectLabels), true)
+ }
+ }
+
+ keyword := strings.Trim(ctx.FormString("q"), " ")
+ if bytes.Contains([]byte(keyword), []byte{0x00}) {
+ keyword = ""
+ }
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ var mileIDs []int64
+ if milestoneID > 0 || milestoneID == db.NoConditionID { // -1 to get those issues which have no milestone assigned
+ mileIDs = []int64{milestoneID}
+ }
+
+ var issueStats *issues_model.IssueStats
+ statsOpts := &issues_model.IssuesOptions{
+ RepoIDs: []int64{repo.ID},
+ LabelIDs: labelIDs,
+ MilestoneIDs: mileIDs,
+ ProjectID: projectID,
+ AssigneeID: assigneeID,
+ MentionedID: mentionedID,
+ PosterID: posterID,
+ ReviewRequestedID: reviewRequestedID,
+ ReviewedID: reviewedID,
+ IsPull: isPullOption,
+ IssueIDs: nil,
+ }
+ if keyword != "" {
+ allIssueIDs, err := issueIDsFromSearch(ctx, keyword, isFuzzy, statsOpts)
+ if err != nil {
+ if issue_indexer.IsAvailable(ctx) {
+ ctx.ServerError("issueIDsFromSearch", err)
+ return
+ }
+ ctx.Data["IssueIndexerUnavailable"] = true
+ return
+ }
+ statsOpts.IssueIDs = allIssueIDs
+ }
+ if keyword != "" && len(statsOpts.IssueIDs) == 0 {
+ // The search ran with the keyword but found no issues,
+ // so just set issueStats to empty.
+ issueStats = &issues_model.IssueStats{}
+ } else {
+ // Either the keyword search found some issues and we need the issueStats for them,
+ // or the keyword is empty and no issue-ID filter is needed; either way, get issueStats with statsOpts.
+ issueStats, err = issues_model.GetIssueStats(ctx, statsOpts)
+ if err != nil {
+ ctx.ServerError("GetIssueStats", err)
+ return
+ }
+ }
+
+ var isShowClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isShowClosed = optional.Some(true)
+ case "all":
+ isShowClosed = optional.None[bool]()
+ default:
+ isShowClosed = optional.Some(false)
+ }
+ // if there are closed issues and no open issues, default to showing all issues
+ if len(ctx.FormString("state")) == 0 && issueStats.OpenCount == 0 && issueStats.ClosedCount != 0 {
+ isShowClosed = optional.None[bool]()
+ }
+
+ if repo.IsTimetrackerEnabled(ctx) {
+ totalTrackedTime, err := issues_model.GetIssueTotalTrackedTime(ctx, statsOpts, isShowClosed)
+ if err != nil {
+ ctx.ServerError("GetIssueTotalTrackedTime", err)
+ return
+ }
+ ctx.Data["TotalTrackedTime"] = totalTrackedTime
+ }
+
+ archived := ctx.FormBool("archived")
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ var total int
+ switch {
+ case isShowClosed.Value():
+ total = int(issueStats.ClosedCount)
+ case !isShowClosed.Has():
+ total = int(issueStats.OpenCount + issueStats.ClosedCount)
+ default:
+ total = int(issueStats.OpenCount)
+ }
+ pager := context.NewPagination(total, setting.UI.IssuePagingNum, page, 5)
+
+ var issues issues_model.IssueList
+ {
+ ids, err := issueIDsFromSearch(ctx, keyword, isFuzzy, &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoIDs: []int64{repo.ID},
+ AssigneeID: assigneeID,
+ PosterID: posterID,
+ MentionedID: mentionedID,
+ ReviewRequestedID: reviewRequestedID,
+ ReviewedID: reviewedID,
+ MilestoneIDs: mileIDs,
+ ProjectID: projectID,
+ IsClosed: isShowClosed,
+ IsPull: isPullOption,
+ LabelIDs: labelIDs,
+ SortType: sortType,
+ })
+ if err != nil {
+ if issue_indexer.IsAvailable(ctx) {
+ ctx.ServerError("issueIDsFromSearch", err)
+ return
+ }
+ ctx.Data["IssueIndexerUnavailable"] = true
+ return
+ }
+ issues, err = issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ ctx.ServerError("GetIssuesByIDs", err)
+ return
+ }
+ }
+
+ approvalCounts, err := issues.GetApprovalCounts(ctx)
+ if err != nil {
+ ctx.ServerError("ApprovalCounts", err)
+ return
+ }
+
+ if ctx.IsSigned {
+ if err := issues.LoadIsRead(ctx, ctx.Doer.ID); err != nil {
+ ctx.ServerError("LoadIsRead", err)
+ return
+ }
+ } else {
+ for i := range issues {
+ issues[i].IsRead = true
+ }
+ }
+
+ commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues)
+ if err != nil {
+ ctx.ServerError("GetIssuesAllCommitStatus", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+
+ if err := issues.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("issues.LoadAttributes", err)
+ return
+ }
+
+ ctx.Data["Issues"] = issues
+ ctx.Data["CommitLastStatus"] = lastStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+
+ // Get assignees.
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return
+ }
+
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+
+ ctx.Data["OrgLabels"] = orgLabels
+ labels = append(labels, orgLabels...)
+ }
+
+ // Get the exclusive scope for every label ID
+ labelExclusiveScopes := make([]string, 0, len(labelIDs))
+ for _, labelID := range labelIDs {
+ foundExclusiveScope := false
+ for _, label := range labels {
+ if label.ID == labelID || label.ID == -labelID {
+ labelExclusiveScopes = append(labelExclusiveScopes, label.ExclusiveScope())
+ foundExclusiveScope = true
+ break
+ }
+ }
+ if !foundExclusiveScope {
+ labelExclusiveScopes = append(labelExclusiveScopes, "")
+ }
+ }
+
+ for _, l := range labels {
+ l.LoadSelectedLabelsAfterClick(labelIDs, labelExclusiveScopes)
+ }
+ ctx.Data["Labels"] = labels
+ ctx.Data["NumLabels"] = len(labels)
+
+ if ctx.FormInt64("assignee") == 0 {
+ assigneeID = 0 // Reset ID to prevent unexpected selection of assignee.
+ }
+
+ ctx.Data["IssueRefEndNames"], ctx.Data["IssueRefURLs"] = issue_service.GetRefEndNamesAndURLs(issues, ctx.Repo.RepoLink)
+
+ ctx.Data["ApprovalCounts"] = func(issueID int64, typ string) int64 {
+ counts, ok := approvalCounts[issueID]
+ if !ok || len(counts) == 0 {
+ return 0
+ }
+ reviewTyp := issues_model.ReviewTypeApprove
+ if typ == "reject" {
+ reviewTyp = issues_model.ReviewTypeReject
+ } else if typ == "waiting" {
+ reviewTyp = issues_model.ReviewTypeRequest
+ }
+ for _, count := range counts {
+ if count.Type == reviewTyp {
+ return count.Count
+ }
+ }
+ return 0
+ }
+
+ retrieveProjects(ctx, repo)
+ if ctx.Written() {
+ return
+ }
+
+ pinned, err := issues_model.GetPinnedIssues(ctx, repo.ID, isPullOption.Value())
+ if err != nil {
+ ctx.ServerError("GetPinnedIssues", err)
+ return
+ }
+
+ ctx.Data["PinnedIssues"] = pinned
+ ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.Doer.IsAdmin)
+ ctx.Data["IssueStats"] = issueStats
+ ctx.Data["OpenCount"] = issueStats.OpenCount
+ ctx.Data["ClosedCount"] = issueStats.ClosedCount
+ linkStr := "%s?q=%s&type=%s&sort=%s&state=%s&labels=%s&milestone=%d&project=%d&assignee=%d&poster=%d&archived=%t"
+ ctx.Data["AllStatesLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "all", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["OpenLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "open", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["ClosedLink"] = fmt.Sprintf(linkStr, ctx.Link,
+ url.QueryEscape(keyword), url.QueryEscape(viewType), url.QueryEscape(sortType), "closed", url.QueryEscape(selectLabels),
+ milestoneID, projectID, assigneeID, posterID, archived)
+ ctx.Data["SelLabelIDs"] = labelIDs
+ ctx.Data["SelectLabels"] = selectLabels
+ ctx.Data["ViewType"] = viewType
+ ctx.Data["SortType"] = sortType
+ ctx.Data["MilestoneID"] = milestoneID
+ ctx.Data["ProjectID"] = projectID
+ ctx.Data["AssigneeID"] = assigneeID
+ ctx.Data["PosterID"] = posterID
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["IsShowClosed"] = isShowClosed
+ switch {
+ case isShowClosed.Value():
+ ctx.Data["State"] = "closed"
+ case !isShowClosed.Has():
+ ctx.Data["State"] = "all"
+ default:
+ ctx.Data["State"] = "open"
+ }
+ ctx.Data["ShowArchivedLabels"] = archived
+
+ pager.AddParam(ctx, "q", "Keyword")
+ pager.AddParam(ctx, "type", "ViewType")
+ pager.AddParam(ctx, "sort", "SortType")
+ pager.AddParam(ctx, "state", "State")
+ pager.AddParam(ctx, "labels", "SelectLabels")
+ pager.AddParam(ctx, "milestone", "MilestoneID")
+ pager.AddParam(ctx, "project", "ProjectID")
+ pager.AddParam(ctx, "assignee", "AssigneeID")
+ pager.AddParam(ctx, "poster", "PosterID")
+ pager.AddParam(ctx, "archived", "ShowArchivedLabels")
+ pager.AddParam(ctx, "fuzzy", "IsFuzzy")
+
+ ctx.Data["Page"] = pager
+}
+
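+// issueIDsFromSearch returns the IDs of the issues that match the keyword and filter options according to the issue indexer.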
+func issueIDsFromSearch(ctx *context.Context, keyword string, fuzzy bool, opts *issues_model.IssuesOptions) ([]int64, error) {
+ ids, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts).Copy(
+ func(o *issue_indexer.SearchOptions) {
+ o.IsFuzzyKeyword = fuzzy
+ },
+ ))
+ if err != nil {
+ return nil, fmt.Errorf("SearchIssues: %w", err)
+ }
+ return ids, nil
+}
+
+// Issues render issues page
+func Issues(ctx *context.Context) {
+ isPullList := ctx.Params(":type") == "pulls"
+ if isPullList {
+ MustAllowPulls(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("repo.pulls")
+ ctx.Data["PageIsPullList"] = true
+ } else {
+ MustEnableIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("repo.issues")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ }
+
+ issues(ctx, ctx.FormInt64("milestone"), ctx.FormInt64("project"), optional.Some(isPullList))
+ if ctx.Written() {
+ return
+ }
+
+ renderMilestones(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.Data["CanWriteIssuesOrPulls"] = ctx.Repo.CanWriteIssuesOrPulls(isPullList)
+
+ ctx.HTML(http.StatusOK, tplIssues)
+}
+
+func renderMilestones(ctx *context.Context) {
+ // Get milestones
+ milestones, err := db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetAllRepoMilestones", err)
+ return
+ }
+
+ openMilestones, closedMilestones := issues_model.MilestoneList{}, issues_model.MilestoneList{}
+ for _, milestone := range milestones {
+ if milestone.IsClosed {
+ closedMilestones = append(closedMilestones, milestone)
+ } else {
+ openMilestones = append(openMilestones, milestone)
+ }
+ }
+ ctx.Data["OpenMilestones"] = openMilestones
+ ctx.Data["ClosedMilestones"] = closedMilestones
+}
+
+// RetrieveRepoMilestonesAndAssignees find all the milestones and assignees of a repository
+func RetrieveRepoMilestonesAndAssignees(ctx *context.Context, repo *repo_model.Repository) {
+ var err error
+ ctx.Data["OpenMilestones"], err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+ ctx.Data["ClosedMilestones"], err = db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+}
+
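+// retrieveProjects loads the open and closed projects of the repository and of its owner into ctx.Data.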
+func retrieveProjects(ctx *context.Context, repo *repo_model.Repository) {
+ // Distinguish whether the owner of the repository
+ // is an individual or an organization
+ repoOwnerType := project_model.TypeIndividual
+ if repo.Owner.IsOrganization() {
+ repoOwnerType = project_model.TypeOrganization
+ }
+ var err error
+ projects, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ RepoID: repo.ID,
+ IsClosed: optional.Some(false),
+ Type: project_model.TypeRepository,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+ projects2, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: repo.OwnerID,
+ IsClosed: optional.Some(false),
+ Type: repoOwnerType,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ ctx.Data["OpenProjects"] = append(projects, projects2...)
+
+ projects, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ RepoID: repo.ID,
+ IsClosed: optional.Some(true),
+ Type: project_model.TypeRepository,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+ projects2, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: repo.OwnerID,
+ IsClosed: optional.Some(true),
+ Type: repoOwnerType,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ ctx.Data["ClosedProjects"] = append(projects, projects2...)
+}
+
+// repoReviewerSelection items to bee shown
+type repoReviewerSelection struct {
+ IsTeam bool
+ Team *organization.Team
+ User *user_model.User
+ Review *issues_model.Review
+ CanChange bool
+ Checked bool
+ ItemID int64
+}
+
+// RetrieveRepoReviewers find all reviewers of a repository
+func RetrieveRepoReviewers(ctx *context.Context, repo *repo_model.Repository, issue *issues_model.Issue, canChooseReviewer bool) {
+ ctx.Data["CanChooseReviewer"] = canChooseReviewer
+
+ originalAuthorReviews, err := issues_model.GetReviewersFromOriginalAuthorsByIssueID(ctx, issue.ID)
+ if err != nil {
+ ctx.ServerError("GetReviewersFromOriginalAuthorsByIssueID", err)
+ return
+ }
+ ctx.Data["OriginalReviews"] = originalAuthorReviews
+
+ reviews, err := issues_model.GetReviewsByIssueID(ctx, issue.ID)
+ if err != nil {
+ ctx.ServerError("GetReviewersByIssueID", err)
+ return
+ }
+
+ if len(reviews) == 0 && !canChooseReviewer {
+ return
+ }
+
+ var (
+ pullReviews []*repoReviewerSelection
+ reviewersResult []*repoReviewerSelection
+ teamReviewersResult []*repoReviewerSelection
+ teamReviewers []*organization.Team
+ reviewers []*user_model.User
+ )
+
+ if canChooseReviewer {
+ posterID := issue.PosterID
+ if issue.OriginalAuthorID > 0 {
+ posterID = 0
+ }
+
+ reviewers, err = repo_model.GetReviewers(ctx, repo, ctx.Doer.ID, posterID)
+ if err != nil {
+ ctx.ServerError("GetReviewers", err)
+ return
+ }
+
+ teamReviewers, err = repo_service.GetReviewerTeams(ctx, repo)
+ if err != nil {
+ ctx.ServerError("GetReviewerTeams", err)
+ return
+ }
+
+ if len(reviewers) > 0 {
+ reviewersResult = make([]*repoReviewerSelection, 0, len(reviewers))
+ }
+
+ if len(teamReviewers) > 0 {
+ teamReviewersResult = make([]*repoReviewerSelection, 0, len(teamReviewers))
+ }
+ }
+
+ pullReviews = make([]*repoReviewerSelection, 0, len(reviews))
+
+ for _, review := range reviews {
+ tmp := &repoReviewerSelection{
+ Checked: review.Type == issues_model.ReviewTypeRequest,
+ Review: review,
+ ItemID: review.ReviewerID,
+ }
+ if review.ReviewerTeamID > 0 {
+ tmp.IsTeam = true
+ tmp.ItemID = -review.ReviewerTeamID
+ }
+
+ if canChooseReviewer {
+ // Users who can choose reviewers can also remove review requests
+ tmp.CanChange = true
+ } else if ctx.Doer != nil && ctx.Doer.ID == review.ReviewerID && review.Type == issues_model.ReviewTypeRequest {
+ // A user can refuse review requests
+ tmp.CanChange = true
+ }
+
+ pullReviews = append(pullReviews, tmp)
+
+ if canChooseReviewer {
+ if tmp.IsTeam {
+ teamReviewersResult = append(teamReviewersResult, tmp)
+ } else {
+ reviewersResult = append(reviewersResult, tmp)
+ }
+ }
+ }
+
+ if len(pullReviews) > 0 {
+ // Drop all non-existing users and teams from the reviews
+ currentPullReviewers := make([]*repoReviewerSelection, 0, len(pullReviews))
+ for _, item := range pullReviews {
+ if item.Review.ReviewerID > 0 {
+ if err = item.Review.LoadReviewer(ctx); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ continue
+ }
+ ctx.ServerError("LoadReviewer", err)
+ return
+ }
+ item.User = item.Review.Reviewer
+ } else if item.Review.ReviewerTeamID > 0 {
+ if err = item.Review.LoadReviewerTeam(ctx); err != nil {
+ if organization.IsErrTeamNotExist(err) {
+ continue
+ }
+ ctx.ServerError("LoadReviewerTeam", err)
+ return
+ }
+ item.Team = item.Review.ReviewerTeam
+ } else {
+ continue
+ }
+
+ currentPullReviewers = append(currentPullReviewers, item)
+ }
+ ctx.Data["PullReviewers"] = currentPullReviewers
+ }
+
+ if canChooseReviewer && reviewersResult != nil {
+ preadded := len(reviewersResult)
+ for _, reviewer := range reviewers {
+ found := false
+ reviewAddLoop:
+ for _, tmp := range reviewersResult[:preadded] {
+ if tmp.ItemID == reviewer.ID {
+ tmp.User = reviewer
+ found = true
+ break reviewAddLoop
+ }
+ }
+
+ if found {
+ continue
+ }
+
+ reviewersResult = append(reviewersResult, &repoReviewerSelection{
+ IsTeam: false,
+ CanChange: true,
+ User: reviewer,
+ ItemID: reviewer.ID,
+ })
+ }
+
+ ctx.Data["Reviewers"] = reviewersResult
+ }
+
+ if canChooseReviewer && teamReviewersResult != nil {
+ preadded := len(teamReviewersResult)
+ for _, team := range teamReviewers {
+ found := false
+ teamReviewAddLoop:
+ for _, tmp := range teamReviewersResult[:preadded] {
+ if tmp.ItemID == -team.ID {
+ tmp.Team = team
+ found = true
+ break teamReviewAddLoop
+ }
+ }
+
+ if found {
+ continue
+ }
+
+ teamReviewersResult = append(teamReviewersResult, &repoReviewerSelection{
+ IsTeam: true,
+ CanChange: true,
+ Team: team,
+ ItemID: -team.ID,
+ })
+ }
+
+ ctx.Data["TeamReviewers"] = teamReviewersResult
+ }
+}
+
+// RetrieveRepoMetas finds all the meta information of a repository
+func RetrieveRepoMetas(ctx *context.Context, repo *repo_model.Repository, isPull bool) []*issues_model.Label {
+ if !ctx.Repo.CanWriteIssuesOrPulls(isPull) {
+ return nil
+ }
+
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return nil
+ }
+ ctx.Data["Labels"] = labels
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return nil
+ }
+
+ ctx.Data["OrgLabels"] = orgLabels
+ labels = append(labels, orgLabels...)
+ }
+
+ RetrieveRepoMilestonesAndAssignees(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+
+ retrieveProjects(ctx, repo)
+ if ctx.Written() {
+ return nil
+ }
+
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return nil
+ }
+
+ // Indicates whether the user can create issue dependencies
+ ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, isPull)
+
+ return labels
+}
+
+// Tries to load and set an issue template. The first return value indicates if a template was loaded.
+func setTemplateIfExists(ctx *context.Context, ctxDataKey string, possibleFiles []string) (bool, map[string]error) {
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ return false, nil
+ }
+
+ templateCandidates := make([]string, 0, 1+len(possibleFiles))
+ if t := ctx.FormString("template"); t != "" {
+ templateCandidates = append(templateCandidates, t)
+ }
+ templateCandidates = append(templateCandidates, possibleFiles...) // Append the well-known files last so they act as fallbacks
+
+ templateErrs := map[string]error{}
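+ // Use the first candidate that exists and parses; parse failures are collected in templateErrs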
+ for _, filename := range templateCandidates {
+ if ok, _ := commit.HasFile(filename); !ok {
+ continue
+ }
+ template, err := issue_template.UnmarshalFromCommit(commit, filename)
+ if err != nil {
+ templateErrs[filename] = err
+ continue
+ }
+ ctx.Data[issueTemplateTitleKey] = template.Title
+ ctx.Data[ctxDataKey] = template.Content
+
+ if template.Type() == api.IssueTemplateTypeYaml {
+ // Replace field default values with values from the query string
+ for _, field := range template.Fields {
+ fieldValue := ctx.FormString("field:" + field.ID)
+ if fieldValue != "" {
+ field.Attributes["value"] = fieldValue
+ }
+ }
+
+ ctx.Data["Fields"] = template.Fields
+ ctx.Data["TemplateFile"] = template.FileName
+ }
+ labelIDs := make([]string, 0, len(template.Labels))
+ if repoLabels, err := issues_model.GetLabelsByRepoID(ctx, ctx.Repo.Repository.ID, "", db.ListOptions{}); err == nil {
+ ctx.Data["Labels"] = repoLabels
+ if ctx.Repo.Owner.IsOrganization() {
+ if orgLabels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{}); err == nil {
+ ctx.Data["OrgLabels"] = orgLabels
+ repoLabels = append(repoLabels, orgLabels...)
+ }
+ }
+
+ for _, metaLabel := range template.Labels {
+ for _, repoLabel := range repoLabels {
+ if strings.EqualFold(repoLabel.Name, metaLabel) {
+ repoLabel.IsChecked = true
+ labelIDs = append(labelIDs, strconv.FormatInt(repoLabel.ID, 10))
+ break
+ }
+ }
+ }
+ }
+
+ if template.Ref != "" && !strings.HasPrefix(template.Ref, "refs/") { // Assume that the ref intended is always a branch - for tags users should use refs/tags/<ref>
+ template.Ref = git.BranchPrefix + template.Ref
+ }
+ ctx.Data["HasSelectedLabel"] = len(labelIDs) > 0
+ ctx.Data["label_ids"] = strings.Join(labelIDs, ",")
+ ctx.Data["Reference"] = template.Ref
+ ctx.Data["RefEndName"] = git.RefName(template.Ref).ShortName()
+ return true, templateErrs
+ }
+ return false, templateErrs
+}
+
+// NewIssue renders the page for creating a new issue
+func NewIssue(ctx *context.Context) {
+ issueConfig, _ := issue_service.GetTemplateConfigFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ hasTemplates := issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = hasTemplates
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ title := ctx.FormString("title")
+ ctx.Data["TitleQuery"] = title
+ body := ctx.FormString("body")
+ ctx.Data["BodyQuery"] = body
+
+ isProjectsEnabled := ctx.Repo.CanRead(unit.TypeProjects)
+ ctx.Data["IsProjectsEnabled"] = isProjectsEnabled
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
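+ // Pre-select the milestone passed via the query string, if it exists in this repository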
+ milestoneID := ctx.FormInt64("milestone")
+ if milestoneID > 0 {
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ log.Error("GetMilestoneByID: %d: %v", milestoneID, err)
+ } else {
+ ctx.Data["milestone_id"] = milestoneID
+ ctx.Data["Milestone"] = milestone
+ }
+ }
+
+ projectID := ctx.FormInt64("project")
+ if projectID > 0 && isProjectsEnabled {
+ project, err := project_model.GetProjectByID(ctx, projectID)
+ if err != nil {
+ log.Error("GetProjectByID: %d: %v", projectID, err)
+ } else if project.RepoID != ctx.Repo.Repository.ID {
+ log.Error("GetProjectByID: %d: %v", projectID, fmt.Errorf("project[%d] not in repo [%d]", project.ID, ctx.Repo.Repository.ID))
+ } else {
+ ctx.Data["project_id"] = projectID
+ ctx.Data["Project"] = project
+ }
+
+ if len(ctx.Req.URL.Query().Get("project")) > 0 {
+ ctx.Data["redirect_after_creation"] = "project"
+ }
+ }
+
+ RetrieveRepoMetas(ctx, ctx.Repo.Repository, false)
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ _, templateErrs := issue_service.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ templateLoaded, errs := setTemplateIfExists(ctx, issueTemplateKey, IssueTemplateCandidates)
+ for k, v := range errs {
+ templateErrs[k] = v
+ }
+ if ctx.Written() {
+ return
+ }
+
+ if len(templateErrs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, templateErrs), true)
+ }
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypeIssues)
+
+ if !issueConfig.BlankIssuesEnabled && hasTemplates && !templateLoaded {
+ // The "issues/new" and "issues/new/choose" share the same query parameters "project" and "milestone", if blank issues are disabled, just redirect to the "issues/choose" page with these parameters.
+ ctx.Redirect(fmt.Sprintf("%s/issues/new/choose?%s", ctx.Repo.Repository.Link(), ctx.Req.URL.RawQuery), http.StatusSeeOther)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplIssueNew)
+}
+
+func renderErrorOfTemplates(ctx *context.Context, errs map[string]error) template.HTML {
+ var files []string
+ for k := range errs {
+ files = append(files, k)
+ }
+ sort.Strings(files) // keep the output stable
+
+ var lines []string
+ for _, file := range files {
+ lines = append(lines, fmt.Sprintf("%s: %v", file, errs[file]))
+ }
+
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.issues.choose.ignore_invalid_templates"),
+ "Summary": ctx.Tr("repo.issues.choose.invalid_templates", len(errs)),
+ "Details": utils.SanitizeFlashErrorString(strings.Join(lines, "\n")),
+ })
+ if err != nil {
+ log.Debug("render flash error: %v", err)
+ flashError = ctx.Locale.Tr("repo.issues.choose.ignore_invalid_templates")
+ }
+ return flashError
+}
+
+// NewIssueChooseTemplate renders the page for creating an issue from a template
+func NewIssueChooseTemplate(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+
+ issueTemplates, errs := issue_service.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["IssueTemplates"] = issueTemplates
+
+ if len(errs) > 0 {
+ ctx.Flash.Warning(renderErrorOfTemplates(ctx, errs), true)
+ }
+
+ if !issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo) {
+ // The "issues/new" and "issues/new/choose" share the same query parameters "project" and "milestone", if no template here, just redirect to the "issues/new" page with these parameters.
+ ctx.Redirect(fmt.Sprintf("%s/issues/new?%s", ctx.Repo.Repository.Link(), ctx.Req.URL.RawQuery), http.StatusSeeOther)
+ return
+ }
+
+ issueConfig, err := issue_service.GetTemplateConfigFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["IssueConfig"] = issueConfig
+ ctx.Data["IssueConfigError"] = err // ctx.Flash.Err makes problems here
+
+ ctx.Data["milestone"] = ctx.FormInt64("milestone")
+ ctx.Data["project"] = ctx.FormInt64("project")
+
+ ctx.HTML(http.StatusOK, tplIssueChoose)
+}
+
+// DeleteIssue deletes an issue
+func DeleteIssue(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := issue_service.DeleteIssue(ctx, ctx.Doer, ctx.Repo.GitRepo, issue); err != nil {
+ ctx.ServerError("DeleteIssueByID", err)
+ return
+ }
+
+ if issue.IsPull {
+ ctx.Redirect(fmt.Sprintf("%s/pulls", ctx.Repo.Repository.Link()), http.StatusSeeOther)
+ return
+ }
+
+ ctx.Redirect(fmt.Sprintf("%s/issues", ctx.Repo.Repository.Link()), http.StatusSeeOther)
+}
+
+// ValidateRepoMetas checks and returns the repository's meta information
+func ValidateRepoMetas(ctx *context.Context, form forms.CreateIssueForm, isPull bool) ([]int64, []int64, int64, int64) {
+ var (
+ repo = ctx.Repo.Repository
+ err error
+ )
+
+ labels := RetrieveRepoMetas(ctx, ctx.Repo.Repository, isPull)
+ if ctx.Written() {
+ return nil, nil, 0, 0
+ }
+
+ var labelIDs []int64
+ hasSelected := false
+ // Check labels.
+ if len(form.LabelIDs) > 0 {
+ labelIDs, err = base.StringsToInt64s(strings.Split(form.LabelIDs, ","))
+ if err != nil {
+ return nil, nil, 0, 0
+ }
+ labelIDMark := make(container.Set[int64])
+ labelIDMark.AddMultiple(labelIDs...)
+
+ for i := range labels {
+ if labelIDMark.Contains(labels[i].ID) {
+ labels[i].IsChecked = true
+ hasSelected = true
+ }
+ }
+ }
+
+ ctx.Data["Labels"] = labels
+ ctx.Data["HasSelectedLabel"] = hasSelected
+ ctx.Data["label_ids"] = form.LabelIDs
+
+ // Check milestone.
+ milestoneID := form.MilestoneID
+ if milestoneID > 0 {
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ ctx.ServerError("GetMilestoneByID", err)
+ return nil, nil, 0, 0
+ }
+ if milestone.RepoID != repo.ID {
+ ctx.ServerError("GetMilestoneByID", err)
+ return nil, nil, 0, 0
+ }
+ ctx.Data["Milestone"] = milestone
+ ctx.Data["milestone_id"] = milestoneID
+ }
+
+ if form.ProjectID > 0 {
+ p, err := project_model.GetProjectByID(ctx, form.ProjectID)
+ if err != nil {
+ ctx.ServerError("GetProjectByID", err)
+ return nil, nil, 0, 0
+ }
+ if p.RepoID != ctx.Repo.Repository.ID && p.OwnerID != ctx.Repo.Repository.OwnerID {
+ ctx.NotFound("", nil)
+ return nil, nil, 0, 0
+ }
+
+ ctx.Data["Project"] = p
+ ctx.Data["project_id"] = form.ProjectID
+ }
+
+ // Check assignees
+ var assigneeIDs []int64
+ if len(form.AssigneeIDs) > 0 {
+ assigneeIDs, err = base.StringsToInt64s(strings.Split(form.AssigneeIDs, ","))
+ if err != nil {
+ return nil, nil, 0, 0
+ }
+
+ // Check that the passed assignees actually exist and are assignable
+ for _, aID := range assigneeIDs {
+ assignee, err := user_model.GetUserByID(ctx, aID)
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return nil, nil, 0, 0
+ }
+
+ valid, err := access_model.CanBeAssigned(ctx, assignee, repo, isPull)
+ if err != nil {
+ ctx.ServerError("CanBeAssigned", err)
+ return nil, nil, 0, 0
+ }
+
+ if !valid {
+ ctx.ServerError("canBeAssigned", repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: aID, RepoName: repo.Name})
+ return nil, nil, 0, 0
+ }
+ }
+ }
+
+ // Keep the legacy single assignee ID field for compatibility reasons
+ if form.AssigneeID > 0 {
+ assigneeIDs = append(assigneeIDs, form.AssigneeID)
+ }
+
+ return labelIDs, assigneeIDs, milestoneID, form.ProjectID
+}
+
+// NewIssuePost handles the form submission for creating a new issue
+func NewIssuePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateIssueForm)
+ ctx.Data["Title"] = ctx.Tr("repo.issues.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ var (
+ repo = ctx.Repo.Repository
+ attachments []string
+ )
+
+ labelIDs, assigneeIDs, milestoneID, projectID := ValidateRepoMetas(ctx, *form, false)
+ if ctx.Written() {
+ return
+ }
+
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
+ return
+ }
+
+ content := form.Content
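+ // If a form-based template was used, render the submitted fields into Markdown for the issue content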
+ if filename := ctx.Req.Form.Get("template-file"); filename != "" {
+ if template, err := issue_template.UnmarshalFromRepo(ctx.Repo.GitRepo, ctx.Repo.Repository.DefaultBranch, filename); err == nil {
+ content = issue_template.RenderToMarkdown(template, ctx.Req.Form)
+ }
+ }
+
+ issue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: form.Title,
+ PosterID: ctx.Doer.ID,
+ Poster: ctx.Doer,
+ MilestoneID: milestoneID,
+ Content: content,
+ Ref: form.Ref,
+ }
+
+ if err := issue_service.NewIssue(ctx, repo, issue, labelIDs, attachments, assigneeIDs); err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.JSONError(ctx.Tr("repo.issues.blocked_by_user"))
+ return
+ } else if repo_model.IsErrUserDoesNotHaveAccessToRepo(err) {
+ ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error())
+ return
+ }
+ ctx.ServerError("NewIssue", err)
+ return
+ }
+
+ if projectID > 0 {
+ if !ctx.Repo.CanRead(unit.TypeProjects) {
+ // User must also be able to see the project.
+ ctx.Error(http.StatusBadRequest, "user hasn't permissions to read projects")
+ return
+ }
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectID, 0); err != nil {
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+
+ log.Trace("Issue created: %d/%d", repo.ID, issue.ID)
+ if ctx.FormString("redirect_after_creation") == "project" && projectID > 0 {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects/" + strconv.FormatInt(projectID, 10))
+ } else {
+ ctx.JSONRedirect(issue.Link())
+ }
+}
+
+// roleDescriptor returns the role descriptor for a comment by the given poster on the given issue in the given repo
+func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *user_model.User, issue *issues_model.Issue, hasOriginalAuthor bool) (issues_model.RoleDescriptor, error) {
+ roleDescriptor := issues_model.RoleDescriptor{}
+
+ if hasOriginalAuthor {
+ return roleDescriptor, nil
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, poster)
+ if err != nil {
+ return roleDescriptor, err
+ }
+
+ // If the poster is the actual poster of the issue, enable Poster role.
+ roleDescriptor.IsPoster = issue.IsPoster(poster.ID)
+
+ // Check if the poster is the owner of the repo.
+ if perm.IsOwner() {
+ // If the poster isn't an admin, enable the owner role.
+ if !poster.IsAdmin {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner
+ return roleDescriptor, nil
+ }
+
+ // Otherwise check if poster is the real repo admin.
+ ok, err := access_model.IsUserRealRepoAdmin(ctx, repo, poster)
+ if err != nil {
+ return roleDescriptor, err
+ }
+ if ok {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoOwner
+ return roleDescriptor, nil
+ }
+ }
+
+ // If the repo is owned by an organization, check the Member role
+ if err := repo.LoadOwner(ctx); err != nil {
+ return roleDescriptor, err
+ }
+ if repo.Owner.IsOrganization() {
+ if isMember, err := organization.IsOrganizationMember(ctx, repo.Owner.ID, poster.ID); err != nil {
+ return roleDescriptor, err
+ } else if isMember {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoMember
+ return roleDescriptor, nil
+ }
+ }
+
+ // If the poster is a collaborator of the repo
+ if isCollaborator, err := repo_model.IsCollaborator(ctx, repo.ID, poster.ID); err != nil {
+ return roleDescriptor, err
+ } else if isCollaborator {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoCollaborator
+ return roleDescriptor, nil
+ }
+
+ hasMergedPR, err := issues_model.HasMergedPullRequestInRepo(ctx, repo.ID, poster.ID)
+ if err != nil {
+ return roleDescriptor, err
+ } else if hasMergedPR {
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoContributor
+ } else if issue.IsPull {
+ // only display the first-time contributor role on pull requests (posters with no merged PR in the repo)
+ roleDescriptor.RoleInRepo = issues_model.RoleRepoFirstTimeContributor
+ }
+
+ return roleDescriptor, nil
+}
+
+func getBranchData(ctx *context.Context, issue *issues_model.Issue) {
+ ctx.Data["BaseBranch"] = nil
+ ctx.Data["HeadBranch"] = nil
+ ctx.Data["HeadUserName"] = nil
+ ctx.Data["BaseName"] = ctx.Repo.Repository.OwnerName
+ if issue.IsPull {
+ pull := issue.PullRequest
+ ctx.Data["BaseBranch"] = pull.BaseBranch
+ ctx.Data["HeadBranch"] = pull.HeadBranch
+ ctx.Data["HeadUserName"] = pull.MustHeadUserName(ctx)
+ }
+}
+
+func prepareHiddenCommentType(ctx *context.Context) {
+ var hiddenCommentTypes *big.Int
+ if ctx.IsSigned {
+ val, err := user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes)
+ if err != nil {
+ ctx.ServerError("GetUserSetting", err)
+ return
+ }
+ hiddenCommentTypes, _ = new(big.Int).SetString(val, 10) // we can safely ignore the failed conversion here
+ }
+
+ ctx.Data["ShouldShowCommentType"] = func(commentType issues_model.CommentType) bool {
+ return hiddenCommentTypes == nil || hiddenCommentTypes.Bit(int(commentType)) == 0
+ }
+}
+
+// ViewIssue render issue view page
+func ViewIssue(ctx *context.Context) {
+ if ctx.Params(":type") == "issues" {
+ // If an issue was requested, check whether the repo has an external tracker and redirect
+ extIssueUnit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
+ if err == nil && extIssueUnit != nil {
+ if extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == markup.IssueNameStyleNumeric || extIssueUnit.ExternalTrackerConfig().ExternalTrackerStyle == "" {
+ metas := ctx.Repo.Repository.ComposeMetas(ctx)
+ metas["index"] = ctx.Params(":index")
+ res, err := vars.Expand(extIssueUnit.ExternalTrackerConfig().ExternalTrackerFormat, metas)
+ if err != nil {
+ log.Error("unable to expand template vars for issue url. issue: %s, err: %v", metas["index"], err)
+ ctx.ServerError("Expand", err)
+ return
+ }
+ ctx.Redirect(res)
+ return
+ }
+ } else if err != nil && !repo_model.IsErrUnitTypeNotExist(err) {
+ ctx.ServerError("GetUnit", err)
+ return
+ }
+ }
+
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.ServerError("GetIssueByIndex", err)
+ }
+ return
+ }
+ if issue.Repo == nil {
+ issue.Repo = ctx.Repo.Repository
+ }
+
+ // Make sure the type and URL match.
+ if ctx.Params(":type") == "issues" && issue.IsPull {
+ ctx.Redirect(issue.Link())
+ return
+ } else if ctx.Params(":type") == "pulls" && !issue.IsPull {
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ if issue.IsPull {
+ MustAllowPulls(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullConversation"] = true
+ } else {
+ MustEnableIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["NewIssueChooseTemplate"] = issue_service.HasTemplatesOrContactLinks(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ }
+
+ if issue.IsPull && !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.Data["IssueType"] = "pulls"
+ } else if !issue.IsPull && !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.Data["IssueType"] = "issues"
+ } else {
+ ctx.Data["IssueType"] = "all"
+ }
+
+ ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanRead(unit.TypeProjects)
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ if err = filterXRefComments(ctx, issue); err != nil {
+ ctx.ServerError("filterXRefComments", err)
+ return
+ }
+
+ ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, emoji.ReplaceAliases(issue.Title))
+
+ iw := new(issues_model.IssueWatch)
+ if ctx.Doer != nil {
+ iw.UserID = ctx.Doer.ID
+ iw.IssueID = issue.ID
+ iw.IsWatching, err = issues_model.CheckIssueWatch(ctx, ctx.Doer, issue)
+ if err != nil {
+ ctx.ServerError("CheckIssueWatch", err)
+ return
+ }
+ }
+ ctx.Data["IssueWatch"] = iw
+ issue.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, issue.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ repo := ctx.Repo.Repository
+
+ // Get more information if it's a pull request.
+ if issue.IsPull {
+ if issue.PullRequest.HasMerged {
+ ctx.Data["DisableStatusChange"] = issue.PullRequest.HasMerged
+ PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ PrepareViewPullInfo(ctx, issue)
+ ctx.Data["DisableStatusChange"] = ctx.Data["IsPullRequestBroken"] == true && issue.IsClosed
+ }
+ if ctx.Written() {
+ return
+ }
+ }
+
+ // Metas.
+ // Check labels.
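+ // Mark the labels that are already attached to the issue so they render as selected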
+ labelIDMark := make(container.Set[int64])
+ for _, label := range issue.Labels {
+ labelIDMark.Add(label.ID)
+ }
+ labels, err := issues_model.GetLabelsByRepoID(ctx, repo.ID, "", db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByRepoID", err)
+ return
+ }
+ ctx.Data["Labels"] = labels
+
+ if repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+ ctx.Data["OrgLabels"] = orgLabels
+
+ labels = append(labels, orgLabels...)
+ }
+
+ hasSelected := false
+ for i := range labels {
+ if labelIDMark.Contains(labels[i].ID) {
+ labels[i].IsChecked = true
+ hasSelected = true
+ }
+ }
+ ctx.Data["HasSelectedLabel"] = hasSelected
+
+ // Check milestone and assignee.
+ if ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ RetrieveRepoMilestonesAndAssignees(ctx, repo)
+ retrieveProjects(ctx, repo)
+
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if issue.IsPull {
+ canChooseReviewer := false
+ if ctx.Doer != nil && ctx.IsSigned {
+ canChooseReviewer = issue_service.CanDoerChangeReviewRequests(ctx, ctx.Doer, repo, issue)
+ }
+
+ RetrieveRepoReviewers(ctx, repo, issue, canChooseReviewer)
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if ctx.IsSigned {
+ // Update issue-user.
+ if err = activities_model.SetIssueReadBy(ctx, issue.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return
+ }
+ }
+
+ var (
+ role issues_model.RoleDescriptor
+ ok bool
+ marked = make(map[int64]issues_model.RoleDescriptor)
+ comment *issues_model.Comment
+ participants = make([]*user_model.User, 1, 10)
+ latestCloseCommentID int64
+ )
+ if ctx.Repo.Repository.IsTimetrackerEnabled(ctx) {
+ if ctx.IsSigned {
+ // Deal with the stopwatch
+ ctx.Data["IsStopwatchRunning"] = issues_model.StopwatchExists(ctx, ctx.Doer.ID, issue.ID)
+ if !ctx.Data["IsStopwatchRunning"].(bool) {
+ var exists bool
+ var swIssue *issues_model.Issue
+ if exists, _, swIssue, err = issues_model.HasUserStopwatch(ctx, ctx.Doer.ID); err != nil {
+ ctx.ServerError("HasUserStopwatch", err)
+ return
+ }
+ ctx.Data["HasUserStopwatch"] = exists
+ if exists {
+ // Warn if the user already has a running stopwatch
+ // and link to the issue that stopwatch belongs to
+ ctx.Data["OtherStopwatchURL"] = swIssue.Link()
+ }
+ }
+ ctx.Data["CanUseTimetracker"] = ctx.Repo.CanUseTimetracker(ctx, issue, ctx.Doer)
+ } else {
+ ctx.Data["CanUseTimetracker"] = false
+ }
+ if ctx.Data["WorkingUsers"], err = issues_model.TotalTimesForEachUser(ctx, &issues_model.FindTrackedTimesOptions{IssueID: issue.ID}); err != nil {
+ ctx.ServerError("TotalTimesForEachUser", err)
+ return
+ }
+ }
+
+ // Check if the user can use issue dependencies
+ ctx.Data["CanCreateIssueDependencies"] = ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull)
+
+ // check if dependencies can be created across repositories
+ ctx.Data["AllowCrossRepositoryDependencies"] = setting.Service.AllowCrossRepositoryDependencies
+
+ if issue.ShowRole, err = roleDescriptor(ctx, repo, issue.Poster, issue, issue.HasOriginalAuthor()); err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[issue.PosterID] = issue.ShowRole
+
+ // Render comments and fetch participants.
+ participants[0] = issue.Poster
+
+ if err := issue.Comments.LoadAttachmentsByIssue(ctx); err != nil {
+ ctx.ServerError("LoadAttachmentsByIssue", err)
+ return
+ }
+ if err := issue.Comments.LoadPosters(ctx); err != nil {
+ ctx.ServerError("LoadPosters", err)
+ return
+ }
+
+ for _, comment = range issue.Comments {
+ comment.Issue = issue
+
+ if comment.Type == issues_model.CommentTypeComment || comment.Type == issues_model.CommentTypeReview {
+ comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ // Check tag.
+ role, ok = marked[comment.PosterID]
+ if ok {
+ comment.ShowRole = role
+ continue
+ }
+
+ comment.ShowRole, err = roleDescriptor(ctx, repo, comment.Poster, issue, comment.HasOriginalAuthor())
+ if err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[comment.PosterID] = comment.ShowRole
+ participants = addParticipant(comment.Poster, participants)
+ } else if comment.Type == issues_model.CommentTypeLabel {
+ if err = comment.LoadLabel(ctx); err != nil {
+ ctx.ServerError("LoadLabel", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypeMilestone {
+ if err = comment.LoadMilestone(ctx); err != nil {
+ ctx.ServerError("LoadMilestone", err)
+ return
+ }
+ ghostMilestone := &issues_model.Milestone{
+ ID: -1,
+ Name: ctx.Locale.TrString("repo.issues.deleted_milestone"),
+ }
+ if comment.OldMilestoneID > 0 && comment.OldMilestone == nil {
+ comment.OldMilestone = ghostMilestone
+ }
+ if comment.MilestoneID > 0 && comment.Milestone == nil {
+ comment.Milestone = ghostMilestone
+ }
+ } else if comment.Type == issues_model.CommentTypeProject {
+ if err = comment.LoadProject(ctx); err != nil {
+ ctx.ServerError("LoadProject", err)
+ return
+ }
+
+ ghostProject := &project_model.Project{
+ ID: project_model.GhostProjectID,
+ Title: ctx.Locale.TrString("repo.issues.deleted_project"),
+ }
+
+ if comment.OldProjectID > 0 && comment.OldProject == nil {
+ comment.OldProject = ghostProject
+ }
+
+ if comment.ProjectID > 0 && comment.Project == nil {
+ comment.Project = ghostProject
+ }
+ } else if comment.Type == issues_model.CommentTypeAssignees || comment.Type == issues_model.CommentTypeReviewRequest {
+ if err = comment.LoadAssigneeUserAndTeam(ctx); err != nil {
+ ctx.ServerError("LoadAssigneeUserAndTeam", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypeRemoveDependency || comment.Type == issues_model.CommentTypeAddDependency {
+ if err = comment.LoadDepIssueDetails(ctx); err != nil {
+ if !issues_model.IsErrIssueNotExist(err) {
+ ctx.ServerError("LoadDepIssueDetails", err)
+ return
+ }
+ }
+ } else if comment.Type.HasContentSupport() {
+ comment.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ if err = comment.LoadReview(ctx); err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("LoadReview", err)
+ return
+ }
+ participants = addParticipant(comment.Poster, participants)
+ if comment.Review == nil {
+ continue
+ }
+ if err = comment.Review.LoadAttributes(ctx); err != nil {
+ if !user_model.IsErrUserNotExist(err) {
+ ctx.ServerError("Review.LoadAttributes", err)
+ return
+ }
+ comment.Review.Reviewer = user_model.NewGhostUser()
+ }
+ if err = comment.Review.LoadCodeComments(ctx); err != nil {
+ ctx.ServerError("Review.LoadCodeComments", err)
+ return
+ }
+ for _, codeComments := range comment.Review.CodeComments {
+ for _, lineComments := range codeComments {
+ for _, c := range lineComments {
+ // Check tag.
+ role, ok = marked[c.PosterID]
+ if ok {
+ c.ShowRole = role
+ continue
+ }
+
+ c.ShowRole, err = roleDescriptor(ctx, repo, c.Poster, issue, c.HasOriginalAuthor())
+ if err != nil {
+ ctx.ServerError("roleDescriptor", err)
+ return
+ }
+ marked[c.PosterID] = c.ShowRole
+ participants = addParticipant(c.Poster, participants)
+ }
+ }
+ }
+ if err = comment.LoadResolveDoer(ctx); err != nil {
+ ctx.ServerError("LoadResolveDoer", err)
+ return
+ }
+ } else if comment.Type == issues_model.CommentTypePullRequestPush {
+ participants = addParticipant(comment.Poster, participants)
+ if err = comment.LoadPushCommits(ctx); err != nil {
+ ctx.ServerError("LoadPushCommits", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for _, commit := range comment.Commits {
+ if commit.Status == nil {
+ continue
+ }
+ commit.Status.HideActionsURL(ctx)
+ git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
+ }
+ }
+ } else if comment.Type == issues_model.CommentTypeAddTimeManual ||
+ comment.Type == issues_model.CommentTypeStopTracking ||
+ comment.Type == issues_model.CommentTypeDeleteTimeManual {
+ // drop the error since the tracked times could have been pruned from the DB
+ _ = comment.LoadTime(ctx)
+ if comment.Content != "" {
+ // Before v1.21 the content stored the formatted string instead of seconds,
+ // so "|" is used as a delimiter to mark the new format
+ if comment.Content[0] != '|' {
+ // handle old time comments that have formatted text stored
+ comment.RenderedContent = templates.SanitizeHTML(comment.Content)
+ comment.Content = ""
+ } else {
+ // else it's just a duration in seconds to pass on to the frontend
+ comment.Content = comment.Content[1:]
+ }
+ }
+ }
+
+ if comment.Type == issues_model.CommentTypeClose || comment.Type == issues_model.CommentTypeMergePull {
+ // Record the ID of the latest close/merge comment.
+ // If the PR is closed, comments of type CommentTypePullRequestPush (29) after latestCloseCommentID won't be rendered.
+ latestCloseCommentID = comment.ID
+ }
+ }
+
+ ctx.Data["LatestCloseCommentID"] = latestCloseCommentID
+
+ // Combine multiple label assignments into a single comment
+ combineLabelComments(issue)
+
+ getBranchData(ctx, issue)
+ if issue.IsPull {
+ pull := issue.PullRequest
+ pull.Issue = issue
+ canDelete := false
+ allowMerge := false
+
+ if ctx.IsSigned {
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ log.Error("LoadHeadRepo: %v", err)
+ } else if pull.HeadRepo != nil {
+ perm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if perm.CanWrite(unit.TypeCode) {
+ // Check if branch is not protected
+ if pull.HeadBranch != pull.HeadRepo.DefaultBranch {
+ if protected, err := git_model.IsBranchProtected(ctx, pull.HeadRepo.ID, pull.HeadBranch); err != nil {
+ log.Error("IsProtectedBranch: %v", err)
+ } else if !protected {
+ canDelete = true
+ ctx.Data["DeleteBranchLink"] = issue.Link() + "/cleanup"
+ }
+ }
+ ctx.Data["CanWriteToHeadRepo"] = true
+ }
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ log.Error("LoadBaseRepo: %v", err)
+ }
+ perm, err := access_model.GetUserRepoPermission(ctx, pull.BaseRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ allowMerge, err = pull_service.IsUserAllowedToMerge(ctx, pull, perm, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToMerge", err)
+ return
+ }
+
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ }
+
+ ctx.Data["AllowMerge"] = allowMerge
+
+ prUnit, err := repo.GetUnit(ctx, unit.TypePullRequests)
+ if err != nil {
+ ctx.ServerError("GetUnit", err)
+ return
+ }
+ prConfig := prUnit.PullRequestsConfig()
+
+ ctx.Data["AutodetectManualMerge"] = prConfig.AutodetectManualMerge
+
+ var mergeStyle repo_model.MergeStyle
+ // Validate the requested merge style and fall back to a sensible default
+ if ms, ok := ctx.Data["MergeStyle"].(repo_model.MergeStyle); !ok ||
+ !prConfig.IsMergeStyleAllowed(ms) {
+ defaultMergeStyle := prConfig.GetDefaultMergeStyle()
+ if prConfig.IsMergeStyleAllowed(defaultMergeStyle) && !ok {
+ mergeStyle = defaultMergeStyle
+ } else if prConfig.AllowMerge {
+ mergeStyle = repo_model.MergeStyleMerge
+ } else if prConfig.AllowRebase {
+ mergeStyle = repo_model.MergeStyleRebase
+ } else if prConfig.AllowRebaseMerge {
+ mergeStyle = repo_model.MergeStyleRebaseMerge
+ } else if prConfig.AllowSquash {
+ mergeStyle = repo_model.MergeStyleSquash
+ } else if prConfig.AllowFastForwardOnly {
+ mergeStyle = repo_model.MergeStyleFastForwardOnly
+ } else if prConfig.AllowManualMerge {
+ mergeStyle = repo_model.MergeStyleManuallyMerged
+ }
+ }
+
+ ctx.Data["MergeStyle"] = mergeStyle
+
+ defaultMergeMessage, defaultMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, mergeStyle)
+ if err != nil {
+ ctx.ServerError("GetDefaultMergeMessage", err)
+ return
+ }
+ ctx.Data["DefaultMergeMessage"] = defaultMergeMessage
+ ctx.Data["DefaultMergeBody"] = defaultMergeBody
+
+ defaultSquashMergeMessage, defaultSquashMergeBody, err := pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pull, repo_model.MergeStyleSquash)
+ if err != nil {
+ ctx.ServerError("GetDefaultSquashMergeMessage", err)
+ return
+ }
+ ctx.Data["DefaultSquashMergeMessage"] = defaultSquashMergeMessage
+ ctx.Data["DefaultSquashMergeBody"] = defaultSquashMergeBody
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return
+ }
+ ctx.Data["ShowMergeInstructions"] = true
+ if pb != nil {
+ pb.Repo = pull.BaseRepo
+ var showMergeInstructions bool
+ if ctx.Doer != nil {
+ showMergeInstructions = pb.CanUserPush(ctx, ctx.Doer)
+ }
+ ctx.Data["ProtectedBranch"] = pb
+ ctx.Data["IsBlockedByApprovals"] = !issues_model.HasEnoughApprovals(ctx, pb, pull)
+ ctx.Data["IsBlockedByRejection"] = issues_model.MergeBlockedByRejectedReview(ctx, pb, pull)
+ ctx.Data["IsBlockedByOfficialReviewRequests"] = issues_model.MergeBlockedByOfficialReviewRequests(ctx, pb, pull)
+ ctx.Data["IsBlockedByOutdatedBranch"] = issues_model.MergeBlockedByOutdatedBranch(pb, pull)
+ ctx.Data["GrantedApprovals"] = issues_model.GetGrantedApprovalsCount(ctx, pb, pull)
+ ctx.Data["RequireSigned"] = pb.RequireSignedCommits
+ ctx.Data["ChangedProtectedFiles"] = pull.ChangedProtectedFiles
+ ctx.Data["IsBlockedByChangedProtectedFiles"] = len(pull.ChangedProtectedFiles) != 0
+ ctx.Data["ChangedProtectedFilesNum"] = len(pull.ChangedProtectedFiles)
+ ctx.Data["ShowMergeInstructions"] = showMergeInstructions
+ }
+ ctx.Data["WillSign"] = false
+ if ctx.Doer != nil {
+ sign, key, _, err := asymkey_service.SignMerge(ctx, pull, ctx.Doer, pull.BaseRepo.RepoPath(), pull.BaseBranch, pull.GetGitRefName())
+ ctx.Data["WillSign"] = sign
+ ctx.Data["SigningKey"] = key
+ if err != nil {
+ if asymkey_service.IsErrWontSign(err) {
+ ctx.Data["WontSignReason"] = err.(*asymkey_service.ErrWontSign).Reason
+ } else {
+ ctx.Data["WontSignReason"] = "error"
+ log.Error("Error whilst checking if could sign pr %d in repo %s. Error: %v", pull.ID, pull.BaseRepo.FullName(), err)
+ }
+ }
+ } else {
+ ctx.Data["WontSignReason"] = "not_signed_in"
+ }
+
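+ // The head branch can only be deleted if the doer may write to it, the branch still exists, and a merged PR's head commit hasn't moved since the merge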
+ isPullBranchDeletable := canDelete &&
+ pull.HeadRepo != nil &&
+ git.IsBranchExist(ctx, pull.HeadRepo.RepoPath(), pull.HeadBranch) &&
+ (!pull.HasMerged || ctx.Data["HeadBranchCommitID"] == ctx.Data["PullHeadCommitID"])
+
+ if isPullBranchDeletable && pull.HasMerged {
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pull.HeadRepoID, pull.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+
+ isPullBranchDeletable = !exist
+ }
+ ctx.Data["IsPullBranchDeletable"] = isPullBranchDeletable
+
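+ // Manual merge is only offered for open PRs when the doer is signed in, the PR can't be auto-merged (and isn't WIP or still being checked), and the repo allows manual merges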
+ stillCanManualMerge := func() bool {
+ if pull.HasMerged || issue.IsClosed || !ctx.IsSigned {
+ return false
+ }
+ if pull.CanAutoMerge() || pull.IsWorkInProgress(ctx) || pull.IsChecking() {
+ return false
+ }
+ if allowMerge && prConfig.AllowManualMerge {
+ return true
+ }
+
+ return false
+ }
+
+ ctx.Data["StillCanManualMerge"] = stillCanManualMerge()
+
+ // Check if there is a pending scheduled merge for this PR
+ ctx.Data["HasPendingPullRequestMerge"], ctx.Data["PendingPullRequestMerge"], err = pull_model.GetScheduledMergeByPullID(ctx, pull.ID)
+ if err != nil {
+ ctx.ServerError("GetScheduledMergeByPullID", err)
+ return
+ }
+ }
+
+ // Get Dependencies
+ blockedBy, err := issue.BlockedByDependencies(ctx, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("BlockedByDependencies", err)
+ return
+ }
+ ctx.Data["BlockedByDependencies"], ctx.Data["BlockedByDependenciesNotPermitted"] = checkBlockedByIssues(ctx, blockedBy)
+ if ctx.Written() {
+ return
+ }
+
+ blocking, err := issue.BlockingDependencies(ctx)
+ if err != nil {
+ ctx.ServerError("BlockingDependencies", err)
+ return
+ }
+
+ ctx.Data["BlockingDependencies"], ctx.Data["BlockingDependenciesNotPermitted"] = checkBlockedByIssues(ctx, blocking)
+ if ctx.Written() {
+ return
+ }
+
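+ // An already pinned issue keeps its pin; for new pins check whether the repo's pin limit allows another one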
+ var pinAllowed bool
+ if !issue.IsPinned() {
+ pinAllowed, err = issues_model.IsNewPinAllowed(ctx, issue.RepoID, issue.IsPull)
+ if err != nil {
+ ctx.ServerError("IsNewPinAllowed", err)
+ return
+ }
+ } else {
+ pinAllowed = true
+ }
+
+ ctx.Data["Participants"] = participants
+ ctx.Data["NumParticipants"] = len(participants)
+ ctx.Data["Issue"] = issue
+ ctx.Data["Reference"] = issue.Ref
+ ctx.Data["SignInLink"] = setting.AppSubURL + "/user/login?redirect_to=" + url.QueryEscape(ctx.Data["Link"].(string))
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.Data["HasProjectsWritePermission"] = ctx.Repo.CanWrite(unit.TypeProjects)
+ ctx.Data["IsRepoAdmin"] = ctx.IsSigned && (ctx.Repo.IsAdmin() || ctx.Doer.IsAdmin)
+ ctx.Data["LockReasons"] = setting.Repository.Issue.LockReasons
+ ctx.Data["RefEndName"] = git.RefName(issue.Ref).ShortName()
+ ctx.Data["NewPinAllowed"] = pinAllowed
+ ctx.Data["PinEnabled"] = setting.Repository.Issue.MaxPinned != 0
+
+ prepareHiddenCommentType(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ // For sidebar
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ ctx.HTML(http.StatusOK, tplIssueView)
+}
+
+// checkBlockedByIssues returns the blockers the doer can read (canRead) and those they cannot (notPermitted)
+func checkBlockedByIssues(ctx *context.Context, blockers []*issues_model.DependencyInfo) (canRead, notPermitted []*issues_model.DependencyInfo) {
+ repoPerms := make(map[int64]access_model.Permission)
+ repoPerms[ctx.Repo.Repository.ID] = ctx.Repo.Permission
+ for _, blocker := range blockers {
+ // Get the permissions for this repository:
+ // if the repo ID already exists in the map, reuse the cached permissions,
+ // otherwise fetch the permissions and add them to the map
+ var perm access_model.Permission
+ existPerm, ok := repoPerms[blocker.RepoID]
+ if ok {
+ perm = existPerm
+ } else {
+ var err error
+ perm, err = access_model.GetUserRepoPermission(ctx, &blocker.Repository, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil, nil
+ }
+ repoPerms[blocker.RepoID] = perm
+ }
+ if perm.CanReadIssuesOrPulls(blocker.Issue.IsPull) {
+ canRead = append(canRead, blocker)
+ } else {
+ notPermitted = append(notPermitted, blocker)
+ }
+ }
+ sortDependencyInfo(canRead)
+ sortDependencyInfo(notPermitted)
+ return canRead, notPermitted
+}
+
+func sortDependencyInfo(blockers []*issues_model.DependencyInfo) {
+ sort.Slice(blockers, func(i, j int) bool {
+ if blockers[i].RepoID == blockers[j].RepoID {
+ return blockers[i].Issue.CreatedUnix < blockers[j].Issue.CreatedUnix
+ }
+ return blockers[i].RepoID < blockers[j].RepoID
+ })
+}
+
+// GetActionIssue will return the issue which is used in the context.
+func GetActionIssue(ctx *context.Context) *issues_model.Issue {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetIssueByIndex", issues_model.IsErrIssueNotExist, err)
+ return nil
+ }
+ issue.Repo = ctx.Repo.Repository
+ checkIssueRights(ctx, issue)
+ if ctx.Written() {
+ return nil
+ }
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return nil
+ }
+ return issue
+}
+
+func checkIssueRights(ctx *context.Context, issue *issues_model.Issue) {
+ if issue.IsPull && !ctx.Repo.CanRead(unit.TypePullRequests) ||
+ !issue.IsPull && !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
+ }
+}
+
+func getActionIssues(ctx *context.Context) issues_model.IssueList {
+ commaSeparatedIssueIDs := ctx.FormString("issue_ids")
+ if len(commaSeparatedIssueIDs) == 0 {
+ return nil
+ }
+ issueIDs := make([]int64, 0, 10)
+ for _, stringIssueID := range strings.Split(commaSeparatedIssueIDs, ",") {
+ issueID, err := strconv.ParseInt(stringIssueID, 10, 64)
+ if err != nil {
+ ctx.ServerError("ParseInt", err)
+ return nil
+ }
+ issueIDs = append(issueIDs, issueID)
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ ctx.ServerError("GetIssuesByIDs", err)
+ return nil
+ }
+ // Check access rights for all issues
+ issueUnitEnabled := ctx.Repo.CanRead(unit.TypeIssues)
+ prUnitEnabled := ctx.Repo.CanRead(unit.TypePullRequests)
+ for _, issue := range issues {
+ if issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("some issue's RepoID is incorrect", errors.New("some issue's RepoID is incorrect"))
+ return nil
+ }
+ if issue.IsPull && !prUnitEnabled || !issue.IsPull && !issueUnitEnabled {
+ ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
+ return nil
+ }
+ if err = issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return nil
+ }
+ }
+ return issues
+}
+
+// GetIssueInfo returns an issue of a repository as JSON
+func GetIssueInfo(ctx *context.Context) {
+ issue, err := issues_model.GetIssueWithAttrsByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.Error(http.StatusNotFound)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
+ }
+ return
+ }
+
+ if issue.IsPull {
+ // Need to check if Pulls are enabled and we can read Pulls
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ } else {
+ // Need to check if Issues are enabled and we can read Issues
+ if !ctx.Repo.CanRead(unit.TypeIssues) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ ctx.JSON(http.StatusOK, convert.ToIssue(ctx, ctx.Doer, issue))
+}
+
+// UpdateIssueTitle changes the issue's title
+func UpdateIssueTitle(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ title := ctx.FormTrim("title")
+ if util.IsEmptyString(title) {
+ ctx.Error(http.StatusBadRequest, "Title cannot be empty or spaces")
+ return
+ }
+
+ // Create a CreateIssueForm with the title so that we can validate the maximum title length
+ i := forms.CreateIssueForm{
+ Title: title,
+ }
+
+ bindingErr := binding.RawValidate(i)
+ if bindingErr.Has(binding.ERR_MAX_SIZE) {
+ ctx.Error(http.StatusBadRequest, "Title cannot be longer than 255 characters")
+ return
+ }
+
+ if err := issue_service.ChangeTitle(ctx, issue, ctx.Doer, title); err != nil {
+ ctx.ServerError("ChangeTitle", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "title": issue.Title,
+ })
+}
+
+// UpdateIssueRef changes the issue's ref (branch)
+func UpdateIssueRef(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) || issue.IsPull {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ ref := ctx.FormTrim("ref")
+
+ if err := issue_service.ChangeIssueRef(ctx, issue, ctx.Doer, ref); err != nil {
+ ctx.ServerError("ChangeRef", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ref": ref,
+ })
+}
+
+// UpdateIssueContent changes the issue's content
+func UpdateIssueContent(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if err := issue_service.ChangeContent(ctx, issue, ctx.Doer, ctx.Req.FormValue("content"), ctx.FormInt("content_version")); err != nil {
+ if errors.Is(err, issues_model.ErrIssueAlreadyChanged) {
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.pulls.edit.already_changed"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.edit.already_changed"))
+ }
+ } else {
+ ctx.ServerError("ChangeContent", err)
+ }
+ return
+ }
+
+ // When the update request doesn't intend to change attachments (e.g. toggling a checkbox), ignore attachment updates
+ if !ctx.FormBool("ignore_attachments") {
+ if err := updateAttachments(ctx, issue, ctx.FormStrings("files[]")); err != nil {
+ ctx.ServerError("UpdateAttachments", err)
+ return
+ }
+ }
+
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, issue.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "content": content,
+ "contentVersion": issue.ContentVersion,
+ "attachments": attachmentsHTML(ctx, issue.Attachments, issue.Content),
+ })
+}
+
+// UpdateIssueDeadline updates an issue deadline
+func UpdateIssueDeadline(ctx *context.Context) {
+ form := web.GetForm(ctx).(*api.EditDeadlineOption)
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
+ }
+ return
+ }
+
+ if !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "", "Not repo writer")
+ return
+ }
+
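+ // Interpret the given date as the end of that day (23:59:59) in the server's local time zone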
+ var deadlineUnix timeutil.TimeStamp
+ var deadline time.Time
+ if form.Deadline != nil && !form.Deadline.IsZero() {
+ deadline = time.Date(form.Deadline.Year(), form.Deadline.Month(), form.Deadline.Day(),
+ 23, 59, 59, 0, time.Local)
+ deadlineUnix = timeutil.TimeStamp(deadline.Unix())
+ }
+
+ if err := issues_model.UpdateIssueDeadline(ctx, issue, deadlineUnix, ctx.Doer); err != nil {
+ ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err.Error())
+ return
+ }
+
+ ctx.JSON(http.StatusCreated, api.IssueDeadline{Deadline: &deadline})
+}
+
+// UpdateIssueMilestone changes the milestone of the selected issues
+func UpdateIssueMilestone(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ milestoneID := ctx.FormInt64("id")
+ for _, issue := range issues {
+ oldMilestoneID := issue.MilestoneID
+ if oldMilestoneID == milestoneID {
+ continue
+ }
+ issue.MilestoneID = milestoneID
+ if err := issue_service.ChangeMilestoneAssign(ctx, issue, ctx.Doer, oldMilestoneID); err != nil {
+ ctx.ServerError("ChangeMilestoneAssign", err)
+ return
+ }
+ }
+
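+ // For htmx requests, re-render the milestone sidebar fragment with the latest milestone comment instead of returning plain JSON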
+ if ctx.FormBool("htmx") {
+ renderMilestones(ctx)
+ if ctx.Written() {
+ return
+ }
+ prepareHiddenCommentType(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ issue := issues[0]
+ var err error
+ if issue.MilestoneID > 0 {
+ issue.Milestone, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, issue.MilestoneID)
+ if err != nil {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ return
+ }
+ } else {
+ issue.Milestone = nil
+ }
+
+ comment := &issues_model.Comment{}
+ has, err := db.GetEngine(ctx).Where("issue_id = ? AND type = ?", issue.ID, issues_model.CommentTypeMilestone).OrderBy("id DESC").Limit(1).Get(comment)
+ if !has || err != nil {
+ ctx.ServerError("GetLatestMilestoneComment", err)
+ return
+ }
+ if err := comment.LoadMilestone(ctx); err != nil {
+ ctx.ServerError("LoadMilestone", err)
+ return
+ }
+ if err := comment.LoadPoster(ctx); err != nil {
+ ctx.ServerError("LoadPoster", err)
+ return
+ }
+ issue.Comments = issues_model.CommentList{comment}
+
+ ctx.Data["Issue"] = issue
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.HTML(http.StatusOK, "htmx/milestone_sidebar")
+ } else {
+ ctx.JSONOK()
+ }
+}
+
+// UpdateIssueAssignee changes the assignee of an issue or pull request
+func UpdateIssueAssignee(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ assigneeID := ctx.FormInt64("id")
+ action := ctx.FormString("action")
+
+ for _, issue := range issues {
+ switch action {
+ case "clear":
+ if err := issue_service.DeleteNotPassedAssignee(ctx, issue, ctx.Doer, []*user_model.User{}); err != nil {
+ ctx.ServerError("ClearAssignees", err)
+ return
+ }
+ default:
+ assignee, err := user_model.GetUserByID(ctx, assigneeID)
+ if err != nil {
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ valid, err := access_model.CanBeAssigned(ctx, assignee, issue.Repo, issue.IsPull)
+ if err != nil {
+ ctx.ServerError("canBeAssigned", err)
+ return
+ }
+ if !valid {
+ ctx.ServerError("canBeAssigned", repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: assigneeID, RepoName: issue.Repo.Name})
+ return
+ }
+
+ _, _, err = issue_service.ToggleAssigneeWithNotify(ctx, issue, ctx.Doer, assigneeID)
+ if err != nil {
+ ctx.ServerError("ToggleAssignee", err)
+ return
+ }
+ }
+ }
+ ctx.JSONOK()
+}
+
+// UpdatePullReviewRequest adds or removes a review request
+func UpdatePullReviewRequest(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ reviewID := ctx.FormInt64("id")
+ action := ctx.FormString("action")
+
+ // TODO: the 'clear' action is not supported yet
+ if action != "attach" && action != "detach" {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ for _, issue := range issues {
+ if err := issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("issue.LoadRepo", err)
+ return
+ }
+
+ if !issue.IsPull {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add review request for non-PR issue %-v#%d",
+ issue.Repo, issue.Index,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ if reviewID < 0 {
+ // negative reviewIDs represent team requests
+ if err := issue.Repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("issue.Repo.LoadOwner", err)
+ return
+ }
+
+ if !issue.Repo.Owner.IsOrganization() {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add team review request for %s#%d owned by non organization UID[%d]",
+ issue.Repo.FullName(), issue.Index, issue.Repo.ID,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ team, err := organization.GetTeamByID(ctx, -reviewID)
+ if err != nil {
+ ctx.ServerError("GetTeamByID", err)
+ return
+ }
+
+ if team.OrgID != issue.Repo.OwnerID {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add team review request for UID[%d] team %s to %s#%d owned by UID[%d]",
+ team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID)
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+
+ err = issue_service.IsValidTeamReviewRequest(ctx, team, ctx.Doer, action == "attach", issue)
+ if err != nil {
+ if issues_model.IsErrNotValidReviewRequest(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add invalid team review request for UID[%d] team %s to %s#%d owned by UID[%d]: Error: %v",
+ team.OrgID, team.Name, issue.Repo.FullName(), issue.Index, issue.Repo.ID,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("IsValidTeamReviewRequest", err)
+ return
+ }
+
+ _, err = issue_service.TeamReviewRequest(ctx, issue, ctx.Doer, team, action == "attach")
+ if err != nil {
+ ctx.ServerError("TeamReviewRequest", err)
+ return
+ }
+ continue
+ }
+
+ reviewer, err := user_model.GetUserByID(ctx, reviewID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: requested reviewer [%d] for %-v to %-v#%d is not exist: Error: %v",
+ reviewID, issue.Repo, issue.Index,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("GetUserByID", err)
+ return
+ }
+
+ err = issue_service.IsValidReviewRequest(ctx, reviewer, ctx.Doer, action == "attach", issue, nil)
+ if err != nil {
+ if issues_model.IsErrNotValidReviewRequest(err) {
+ log.Warn(
+ "UpdatePullReviewRequest: refusing to add invalid review request for %-v to %-v#%d: Error: %v",
+ reviewer, issue.Repo, issue.Index,
+ err,
+ )
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("isValidReviewRequest", err)
+ return
+ }
+
+ _, err = issue_service.ReviewRequest(ctx, issue, ctx.Doer, reviewer, action == "attach")
+ if err != nil {
+ if issues_model.IsErrReviewRequestOnClosedPR(err) {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("ReviewRequest", err)
+ return
+ }
+ }
+
+ ctx.JSONOK()
+}
+
+// SearchIssues searches for issues across the repositories that the user has access to
+func SearchIssues(ctx *context.Context) {
+ before, since, err := context.GetQueryBeforeSince(ctx.Base)
+ if err != nil {
+ log.Error("GetQueryBeforeSince: %v", err)
+ ctx.Error(http.StatusUnprocessableEntity, "invalid before or since")
+ return
+ }
+
+ var isClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isClosed = optional.Some(true)
+ case "all":
+ isClosed = optional.None[bool]()
+ default:
+ isClosed = optional.Some(false)
+ }
+
+ var (
+ repoIDs []int64
+ allPublic bool
+ )
+ {
+ // find repos user can access (for issue search)
+ opts := &repo_model.SearchRepoOptions{
+ Private: false,
+ AllPublic: true,
+ TopicOnly: false,
+ Collaborate: optional.None[bool](),
+ // This needs to be a column that is not nil in fixtures or
+ // MySQL will return different results when sorting by null in some cases
+ OrderBy: db.SearchOrderByAlphabetically,
+ Actor: ctx.Doer,
+ }
+ if ctx.IsSigned {
+ opts.Private = true
+ opts.AllLimited = true
+ }
+ if ctx.FormString("owner") != "" {
+ owner, err := user_model.GetUserByName(ctx, ctx.FormString("owner"))
+ if err != nil {
+ log.Error("GetUserByName: %v", err)
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Error(http.StatusBadRequest, "Owner not found", err.Error())
+ } else {
+ ctx.Error(http.StatusInternalServerError)
+ }
+ return
+ }
+ opts.OwnerID = owner.ID
+ opts.AllLimited = false
+ opts.AllPublic = false
+ opts.Collaborate = optional.Some(false)
+ }
+ if ctx.FormString("team") != "" {
+ if ctx.FormString("owner") == "" {
+ ctx.Error(http.StatusBadRequest, "Owner organisation is required for filtering on team")
+ return
+ }
+ team, err := organization.GetTeam(ctx, opts.OwnerID, ctx.FormString("team"))
+ if err != nil {
+ log.Error("GetTeam: %v", err)
+ if organization.IsErrTeamNotExist(err) {
+ ctx.Error(http.StatusBadRequest)
+ } else {
+ ctx.Error(http.StatusInternalServerError)
+ }
+ return
+ }
+ opts.TeamID = team.ID
+ }
+
+ if opts.AllPublic {
+ allPublic = true
+ opts.AllPublic = false // set it to false to avoid returning too many repos; we can filter via the indexer instead
+ }
+ repoIDs, _, err = repo_model.SearchRepositoryIDs(ctx, opts)
+ if err != nil {
+ log.Error("SearchRepositoryIDs: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ if len(repoIDs) == 0 {
+ // no repos found, don't let the indexer return all repos
+ repoIDs = []int64{0}
+ }
+ }
+
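+ // a keyword containing a NUL byte cannot be a valid search term, so drop it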
+ keyword := ctx.FormTrim("q")
+ if strings.IndexByte(keyword, 0) >= 0 {
+ keyword = ""
+ }
+
+ isPull := optional.None[bool]()
+ switch ctx.FormString("type") {
+ case "pulls":
+ isPull = optional.Some(true)
+ case "issues":
+ isPull = optional.Some(false)
+ }
+
+ var includedAnyLabels []int64
+ {
+ labels := ctx.FormTrim("labels")
+ var includedLabelNames []string
+ if len(labels) > 0 {
+ includedLabelNames = strings.Split(labels, ",")
+ }
+ includedAnyLabels, err = issues_model.GetLabelIDsByNames(ctx, includedLabelNames)
+ if err != nil {
+ log.Error("GetLabelIDsByNames: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ var includedMilestones []int64
+ {
+ milestones := ctx.FormTrim("milestones")
+ var includedMilestoneNames []string
+ if len(milestones) > 0 {
+ includedMilestoneNames = strings.Split(milestones, ",")
+ }
+ includedMilestones, err = issues_model.GetMilestoneIDsByNames(ctx, includedMilestoneNames)
+ if err != nil {
+ log.Error("GetMilestoneIDsByNames: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ }
+
+ projectID := optional.None[int64]()
+ if v := ctx.FormInt64("project"); v > 0 {
+ projectID = optional.Some(v)
+ }
+
+ // this API is also used by the web UI,
+ // so the default limit is set to fit the UI's needs
+ limit := ctx.FormInt("limit")
+ if limit == 0 {
+ limit = setting.UI.IssuePagingNum
+ } else if limit > setting.API.MaxResponseItems {
+ limit = setting.API.MaxResponseItems
+ }
+
+ searchOpt := &issue_indexer.SearchOptions{
+ Paginator: &db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: limit,
+ },
+ Keyword: keyword,
+ RepoIDs: repoIDs,
+ AllPublic: allPublic,
+ IsPull: isPull,
+ IsClosed: isClosed,
+ IncludedAnyLabelIDs: includedAnyLabels,
+ MilestoneIDs: includedMilestones,
+ ProjectID: projectID,
+ SortBy: issue_indexer.SortByCreatedDesc,
+ }
+
+ if since != 0 {
+ searchOpt.UpdatedAfterUnix = optional.Some(since)
+ }
+ if before != 0 {
+ searchOpt.UpdatedBeforeUnix = optional.Some(before)
+ }
+
+ if ctx.IsSigned {
+ ctxUserID := ctx.Doer.ID
+ if ctx.FormBool("created") {
+ searchOpt.PosterID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("assigned") {
+ searchOpt.AssigneeID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("mentioned") {
+ searchOpt.MentionID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("review_requested") {
+ searchOpt.ReviewRequestedID = optional.Some(ctxUserID)
+ }
+ if ctx.FormBool("reviewed") {
+ searchOpt.ReviewedID = optional.Some(ctxUserID)
+ }
+ }
+
+ // FIXME: Sorting by priority repo is not supported when searching via the indexer;
+ // it is indeed a regression, but I think it is worth supporting filtering via the indexer first.
+ _ = ctx.FormInt64("priority_repo_id")
+
+ ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
+ if err != nil {
+ log.Error("SearchIssues: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ log.Error("GetIssuesByIDs: %v", err)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ ctx.SetTotalCountHeader(total)
+ ctx.JSON(http.StatusOK, convert.ToIssueList(ctx, ctx.Doer, issues))
+}
+
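+// getUserIDForFilter resolves the user name given in the query parameter to a user ID;
+// it writes an error response and returns 0 when the user cannot be found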
+func getUserIDForFilter(ctx *context.Context, queryName string) int64 {
+ userName := ctx.FormString(queryName)
+ if len(userName) == 0 {
+ return 0
+ }
+
+ user, err := user_model.GetUserByName(ctx, userName)
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("", err)
+ return 0
+ }
+
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return 0
+ }
+
+ return user.ID
+}
+
+// ListIssues lists the issues of a repository
+func ListIssues(ctx *context.Context) {
+ before, since, err := context.GetQueryBeforeSince(ctx.Base)
+ if err != nil {
+ ctx.Error(http.StatusUnprocessableEntity, err.Error())
+ return
+ }
+
+ var isClosed optional.Option[bool]
+ switch ctx.FormString("state") {
+ case "closed":
+ isClosed = optional.Some(true)
+ case "all":
+ isClosed = optional.None[bool]()
+ default:
+ isClosed = optional.Some(false)
+ }
+
+ keyword := ctx.FormTrim("q")
+ if strings.IndexByte(keyword, 0) >= 0 {
+ keyword = ""
+ }
+
+ var labelIDs []int64
+ if split := strings.Split(ctx.FormString("labels"), ","); len(split) > 0 {
+ labelIDs, err = issues_model.GetLabelIDsInRepoByNames(ctx, ctx.Repo.Repository.ID, split)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ }
+
+ var mileIDs []int64
+ if part := strings.Split(ctx.FormString("milestones"), ","); len(part) > 0 {
+ for i := range part {
+ // use milestone names and fall back to IDs;
+ // non-existent milestones are discarded
+ mile, err := issues_model.GetMilestoneByRepoIDANDName(ctx, ctx.Repo.Repository.ID, part[i])
+ if err == nil {
+ mileIDs = append(mileIDs, mile.ID)
+ continue
+ }
+ if !issues_model.IsErrMilestoneNotExist(err) {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+ id, err := strconv.ParseInt(part[i], 10, 64)
+ if err != nil {
+ continue
+ }
+ mile, err = issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, id)
+ if err == nil {
+ mileIDs = append(mileIDs, mile.ID)
+ continue
+ }
+ if issues_model.IsErrMilestoneNotExist(err) {
+ continue
+ }
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ }
+ }
+
+ projectID := optional.None[int64]()
+ if v := ctx.FormInt64("project"); v > 0 {
+ projectID = optional.Some(v)
+ }
+
+ isPull := optional.None[bool]()
+ switch ctx.FormString("type") {
+ case "pulls":
+ isPull = optional.Some(true)
+ case "issues":
+ isPull = optional.Some(false)
+ }
+
+ // FIXME: we should be more efficient here
+ createdByID := getUserIDForFilter(ctx, "created_by")
+ if ctx.Written() {
+ return
+ }
+ assignedByID := getUserIDForFilter(ctx, "assigned_by")
+ if ctx.Written() {
+ return
+ }
+ mentionedByID := getUserIDForFilter(ctx, "mentioned_by")
+ if ctx.Written() {
+ return
+ }
+
+ searchOpt := &issue_indexer.SearchOptions{
+ Paginator: &db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ Keyword: keyword,
+ RepoIDs: []int64{ctx.Repo.Repository.ID},
+ IsPull: isPull,
+ IsClosed: isClosed,
+ ProjectID: projectID,
+ SortBy: issue_indexer.SortByCreatedDesc,
+ }
+ if since != 0 {
+ searchOpt.UpdatedAfterUnix = optional.Some(since)
+ }
+ if before != 0 {
+ searchOpt.UpdatedBeforeUnix = optional.Some(before)
+ }
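+ // a single label ID of 0 means "issues without any label"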
+ if len(labelIDs) == 1 && labelIDs[0] == 0 {
+ searchOpt.NoLabelOnly = true
+ } else {
+ for _, labelID := range labelIDs {
+ if labelID > 0 {
+ searchOpt.IncludedLabelIDs = append(searchOpt.IncludedLabelIDs, labelID)
+ } else {
+ searchOpt.ExcludedLabelIDs = append(searchOpt.ExcludedLabelIDs, -labelID)
+ }
+ }
+ }
+
+ if len(mileIDs) == 1 && mileIDs[0] == db.NoConditionID {
+ searchOpt.MilestoneIDs = []int64{0}
+ } else {
+ searchOpt.MilestoneIDs = mileIDs
+ }
+
+ if createdByID > 0 {
+ searchOpt.PosterID = optional.Some(createdByID)
+ }
+ if assignedByID > 0 {
+ searchOpt.AssigneeID = optional.Some(assignedByID)
+ }
+ if mentionedByID > 0 {
+ searchOpt.MentionID = optional.Some(mentionedByID)
+ }
+
+ ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "SearchIssues", err.Error())
+ return
+ }
+ issues, err := issues_model.GetIssuesByIDs(ctx, ids, true)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "FindIssuesByIDs", err.Error())
+ return
+ }
+
+ ctx.SetTotalCountHeader(total)
+ ctx.JSON(http.StatusOK, convert.ToIssueList(ctx, ctx.Doer, issues))
+}
+
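+// BatchDeleteIssues deletes the selected issues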
+func BatchDeleteIssues(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+ for _, issue := range issues {
+ if err := issue_service.DeleteIssue(ctx, ctx.Doer, ctx.Repo.GitRepo, issue); err != nil {
+ ctx.ServerError("DeleteIssue", err)
+ return
+ }
+ }
+ ctx.JSONOK()
+}
+
+// UpdateIssueStatus changes the status of the selected issues
+func UpdateIssueStatus(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ var isClosed bool
+ switch action := ctx.FormString("action"); action {
+ case "open":
+ isClosed = false
+ case "close":
+ isClosed = true
+ default:
+ log.Warn("Unrecognized action: %s", action)
+ }
+
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("LoadRepositories", err)
+ return
+ }
+ if err := issues.LoadPullRequests(ctx); err != nil {
+ ctx.ServerError("LoadPullRequests", err)
+ return
+ }
+
+ for _, issue := range issues {
+ if issue.IsPull && issue.PullRequest.HasMerged {
+ continue
+ }
+ if issue.IsClosed != isClosed {
+ if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", isClosed); err != nil {
+ if issues_model.IsErrDependenciesLeft(err) {
+ ctx.JSON(http.StatusPreconditionFailed, map[string]any{
+ "error": ctx.Tr("repo.issues.dependency.issue_batch_close_blocked", issue.Index),
+ })
+ return
+ }
+ ctx.ServerError("ChangeStatus", err)
+ return
+ }
+ }
+ }
+ ctx.JSONOK()
+}
+
+// NewComment creates a comment for an issue
+func NewComment(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateCommentForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin {
+ ctx.JSONError(ctx.Tr("repo.issues.comment_on_locked"))
+ return
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ var comment *issues_model.Comment
+ defer func() {
+ // Check if the issue admin/poster is changing the status of the issue.
+ if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
+ (form.Status == "reopen" || form.Status == "close") &&
+ !(issue.IsPull && issue.PullRequest.HasMerged) {
+ // Duplication and conflict checks should apply when reopening a pull request.
+ var pr *issues_model.PullRequest
+
+ if form.Status == "reopen" && issue.IsPull {
+ pull := issue.PullRequest
+ var err error
+ pr, err = issues_model.GetUnmergedPullRequest(ctx, pull.HeadRepoID, pull.BaseRepoID, pull.HeadBranch, pull.BaseBranch, pull.Flow)
+ if err != nil {
+ if !issues_model.IsErrPullRequestNotExist(err) {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ return
+ }
+ }
+
+ // Regenerate patch and test conflict.
+ if pr == nil {
+ issue.PullRequest.HeadCommitID = ""
+ pull_service.AddToTaskQueue(ctx, issue.PullRequest)
+ }
+
+ // check whether the PR ref <refs/pulls/pr_index/head> in the base repo is consistent with the head commit of the head branch in the head repo
+ // get the head commit of the PR
+ if pull.Flow == issues_model.PullRequestFlowGithub {
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("Unable to load base repo", err)
+ return
+ }
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("Unable to load head repo", err)
+ return
+ }
+
+ // Check if the base branch of the pull request still exists.
+ if ok := git.IsBranchExist(ctx, pull.BaseRepo.RepoPath(), pull.BaseBranch); !ok {
+ ctx.JSONError(ctx.Tr("repo.pulls.reopen_failed.base_branch"))
+ return
+ }
+
+ // Check if the head branch of the pull request still exists.
+ if ok := git.IsBranchExist(ctx, pull.HeadRepo.RepoPath(), pull.HeadBranch); !ok {
+ ctx.JSONError(ctx.Tr("repo.pulls.reopen_failed.head_branch"))
+ return
+ }
+
+ prHeadRef := pull.GetGitRefName()
+ prHeadCommitID, err := git.GetFullCommitID(ctx, pull.BaseRepo.RepoPath(), prHeadRef)
+ if err != nil {
+ ctx.ServerError("Get head commit Id of pr fail", err)
+ return
+ }
+
+ headBranchRef := pull.GetGitHeadBranchRefName()
+ headBranchCommitID, err := git.GetFullCommitID(ctx, pull.HeadRepo.RepoPath(), headBranchRef)
+ if err != nil {
+ ctx.ServerError("Get head commit Id of head branch fail", err)
+ return
+ }
+
+ err = pull.LoadIssue(ctx)
+ if err != nil {
+ ctx.ServerError("load the issue of pull request error", err)
+ return
+ }
+
+ if prHeadCommitID != headBranchCommitID {
+ // force push to base repo
+ err := git.Push(ctx, pull.HeadRepo.RepoPath(), git.PushOptions{
+ Remote: pull.BaseRepo.RepoPath(),
+ Branch: pull.HeadBranch + ":" + prHeadRef,
+ Force: true,
+ Env: repo_module.InternalPushingEnvironment(pull.Issue.Poster, pull.BaseRepo),
+ })
+ if err != nil {
+ ctx.ServerError("force push error", err)
+ return
+ }
+ }
+ }
+ }
+
+ if pr != nil {
+ ctx.Flash.Info(ctx.Tr("repo.pulls.open_unmerged_pull_exists", pr.Index))
+ } else {
+ isClosed := form.Status == "close"
+ if err := issue_service.ChangeStatus(ctx, issue, ctx.Doer, "", isClosed); err != nil {
+ log.Error("ChangeStatus: %v", err)
+
+ if issues_model.IsErrDependenciesLeft(err) {
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.issue_close_blocked"))
+ }
+ return
+ }
+ } else {
+ if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
+ ctx.ServerError("CreateOrStopIssueStopwatch", err)
+ return
+ }
+
+ log.Trace("Issue [%d] status changed to closed: %v", issue.ID, issue.IsClosed)
+ }
+ }
+ }
+
+ // Redirect to comment hashtag if there is any actual content.
+ typeName := "issues"
+ if issue.IsPull {
+ typeName = "pulls"
+ }
+ if comment != nil {
+ ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d#%s", ctx.Repo.RepoLink, typeName, issue.Index, comment.HashTag()))
+ } else {
+ ctx.JSONRedirect(fmt.Sprintf("%s/%s/%d", ctx.Repo.RepoLink, typeName, issue.Index))
+ }
+ }()
+
+ // Fix #321: Allow empty comments, as long as we have attachments.
+ if len(form.Content) == 0 && len(attachments) == 0 {
+ return
+ }
+
+ comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments)
+ if err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_by_user"))
+ } else {
+ ctx.ServerError("CreateIssueComment", err)
+ }
+ return
+ }
+
+ log.Trace("Comment created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, comment.ID)
+}
+
+// UpdateCommentContent changes the content of an issue comment
+func UpdateCommentContent(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ oldContent := comment.Content
+ newContent := ctx.FormString("content")
+ contentVersion := ctx.FormInt("content_version")
+
+ comment.Content = newContent
+ if err = issue_service.UpdateComment(ctx, comment, contentVersion, ctx.Doer, oldContent); err != nil {
+ if errors.Is(err, issues_model.ErrCommentAlreadyChanged) {
+ ctx.JSONError(ctx.Tr("repo.comments.edit.already_changed"))
+ } else {
+ ctx.ServerError("UpdateComment", err)
+ }
+ return
+ }
+
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+
+ // when the update request doesn't intend to update attachments (e.g. change checkbox state), ignore attachment updates
+ if !ctx.FormBool("ignore_attachments") {
+ if err := updateAttachments(ctx, comment, ctx.FormStrings("files[]")); err != nil {
+ ctx.ServerError("UpdateAttachments", err)
+ return
+ }
+ }
+
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.FormString("context"), // FIXME: <- IS THIS SAFE ?
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, comment.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "content": content,
+ "contentVersion": comment.ContentVersion,
+ "attachments": attachmentsHTML(ctx, comment.Attachments, comment.Content),
+ })
+}
+
+// DeleteComment deletes a comment of an issue
+func DeleteComment(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(comment.Issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ } else if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ if err = issue_service.DeleteComment(ctx, ctx.Doer, comment); err != nil {
+ ctx.ServerError("DeleteComment", err)
+ return
+ }
+
+ ctx.Status(http.StatusOK)
+}
+
+// ChangeIssueReaction creates or removes a reaction for an issue
+func ChangeIssueReaction(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ReactionForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.ServerError("ChangeIssueReaction", errors.New(ctx.GetErrMsg()))
+ return
+ }
+
+ switch ctx.Params(":action") {
+ case "react":
+ reaction, err := issue_service.CreateIssueReaction(ctx, ctx.Doer, issue, form.Content)
+ if err != nil {
+ if issues_model.IsErrForbiddenIssueReaction(err) {
+ ctx.ServerError("ChangeIssueReaction", err)
+ return
+ }
+ log.Info("CreateIssueReaction: %s", err)
+ break
+ }
+
+ log.Trace("Reaction for issue created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, reaction.ID)
+ case "unreact":
+ if err := issues_model.DeleteIssueReaction(ctx, ctx.Doer.ID, issue.ID, form.Content); err != nil {
+ ctx.ServerError("DeleteIssueReaction", err)
+ return
+ }
+
+ log.Trace("Reaction for issue removed: %d/%d", ctx.Repo.Repository.ID, issue.ID)
+ default:
+ ctx.NotFound(fmt.Sprintf("Unknown action %s", ctx.Params(":action")), nil)
+ return
+ }
+
+ // Reload new reactions
+ issue.Reactions = nil
+ if err := issue.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("ChangeIssueReaction.LoadAttributes", err)
+ return
+ }
+
+ if len(issue.Reactions) == 0 {
+ ctx.JSON(http.StatusOK, map[string]any{
+ "empty": true,
+ "html": "",
+ })
+ return
+ }
+
+ html, err := ctx.RenderToHTML(tplReactions, map[string]any{
+ "ctxData": ctx.Data,
+ "ActionURL": fmt.Sprintf("%s/issues/%d/reactions", ctx.Repo.RepoLink, issue.Index),
+ "Reactions": issue.Reactions.GroupByType(),
+ })
+ if err != nil {
+ ctx.ServerError("ChangeIssueReaction.HTMLString", err)
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]any{
+ "html": html,
+ })
+}
+
+// ChangeCommentReaction creates or removes a reaction for a comment
+func ChangeCommentReaction(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ReactionForm)
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.IsSigned || (ctx.Doer.ID != comment.PosterID && !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if comment.Issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ comment.Issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Type.HasContentSupport() {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ switch ctx.Params(":action") {
+ case "react":
+ reaction, err := issue_service.CreateCommentReaction(ctx, ctx.Doer, comment.Issue, comment, form.Content)
+ if err != nil {
+ if issues_model.IsErrForbiddenIssueReaction(err) {
+ ctx.ServerError("ChangeIssueReaction", err)
+ return
+ }
+ log.Info("CreateCommentReaction: %s", err)
+ break
+ }
+
+ log.Trace("Reaction for comment created: %d/%d/%d/%d", ctx.Repo.Repository.ID, comment.Issue.ID, comment.ID, reaction.ID)
+ case "unreact":
+ if err := issues_model.DeleteCommentReaction(ctx, ctx.Doer.ID, comment.Issue.ID, comment.ID, form.Content); err != nil {
+ ctx.ServerError("DeleteCommentReaction", err)
+ return
+ }
+
+ log.Trace("Reaction for comment removed: %d/%d/%d", ctx.Repo.Repository.ID, comment.Issue.ID, comment.ID)
+ default:
+ ctx.NotFound(fmt.Sprintf("Unknown action %s", ctx.Params(":action")), nil)
+ return
+ }
+
+ // Reload new reactions
+ comment.Reactions = nil
+ if err = comment.LoadReactions(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("ChangeCommentReaction.LoadReactions", err)
+ return
+ }
+
+ if len(comment.Reactions) == 0 {
+ ctx.JSON(http.StatusOK, map[string]any{
+ "empty": true,
+ "html": "",
+ })
+ return
+ }
+
+ html, err := ctx.RenderToHTML(tplReactions, map[string]any{
+ "ctxData": ctx.Data,
+ "ActionURL": fmt.Sprintf("%s/comments/%d/reactions", ctx.Repo.RepoLink, comment.ID),
+ "Reactions": comment.Reactions.GroupByType(),
+ })
+ if err != nil {
+ ctx.ServerError("ChangeCommentReaction.HTMLString", err)
+ return
+ }
+ ctx.JSON(http.StatusOK, map[string]any{
+ "html": html,
+ })
+}
+
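+// addParticipant appends the poster to the participants list unless they are already in it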
+func addParticipant(poster *user_model.User, participants []*user_model.User) []*user_model.User {
+ for _, part := range participants {
+ if poster.ID == part.ID {
+ return participants
+ }
+ }
+ return append(participants, poster)
+}
+
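+// filterXRefComments removes cross-reference comments that point to repositories the viewer cannot read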
+func filterXRefComments(ctx *context.Context, issue *issues_model.Issue) error {
+ // Remove comments that the user has no permissions to see
+ for i := 0; i < len(issue.Comments); {
+ c := issue.Comments[i]
+ if issues_model.CommentTypeIsRef(c.Type) && c.RefRepoID != issue.RepoID && c.RefRepoID != 0 {
+ var err error
+ // Set RefRepo for description in template
+ c.RefRepo, err = repo_model.GetRepositoryByID(ctx, c.RefRepoID)
+ if err != nil {
+ return err
+ }
+ perm, err := access_model.GetUserRepoPermission(ctx, c.RefRepo, ctx.Doer)
+ if err != nil {
+ return err
+ }
+ if !perm.CanReadIssuesOrPulls(c.RefIsPull) {
+ issue.Comments = append(issue.Comments[:i], issue.Comments[i+1:]...)
+ continue
+ }
+ }
+ i++
+ }
+ return nil
+}
+
+// GetIssueAttachments returns attachments for the issue
+func GetIssueAttachments(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ attachments := make([]*api.Attachment, len(issue.Attachments))
+ for i := 0; i < len(issue.Attachments); i++ {
+ attachments[i] = convert.ToAttachment(ctx.Repo.Repository, issue.Attachments[i])
+ }
+ ctx.JSON(http.StatusOK, attachments)
+}
+
+// GetCommentAttachments returns attachments for the comment
+func GetCommentAttachments(ctx *context.Context) {
+ comment, err := issues_model.GetCommentByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetCommentByID", issues_model.IsErrCommentNotExist, err)
+ return
+ }
+
+ if err := comment.LoadIssue(ctx); err != nil {
+ ctx.NotFoundOrServerError("LoadIssue", issues_model.IsErrIssueNotExist, err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !ctx.Repo.Permission.CanReadIssuesOrPulls(comment.Issue.IsPull) {
+ ctx.NotFound("CanReadIssuesOrPulls", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if !comment.Type.HasAttachmentSupport() {
+ ctx.ServerError("GetCommentAttachments", fmt.Errorf("comment type %v does not support attachments", comment.Type))
+ return
+ }
+
+ attachments := make([]*api.Attachment, 0)
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+ for i := 0; i < len(comment.Attachments); i++ {
+ attachments = append(attachments, convert.ToAttachment(ctx.Repo.Repository, comment.Attachments[i]))
+ }
+ ctx.JSON(http.StatusOK, attachments)
+}
+
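+// updateAttachments syncs the attachments of an issue or comment with the given list of attachment UUIDs:
+// attachments missing from the list are deleted, new ones are linked, and the attachment list is reloaded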
+func updateAttachments(ctx *context.Context, item any, files []string) error {
+ var attachments []*repo_model.Attachment
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ attachments = content.Attachments
+ case *issues_model.Comment:
+ attachments = content.Attachments
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ for i := 0; i < len(attachments); i++ {
+ if util.SliceContainsString(files, attachments[i].UUID) {
+ continue
+ }
+ if err := repo_model.DeleteAttachment(ctx, attachments[i], true); err != nil {
+ return err
+ }
+ }
+ var err error
+ if len(files) > 0 {
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ err = issues_model.UpdateIssueAttachments(ctx, content.ID, files)
+ case *issues_model.Comment:
+ err = content.UpdateAttachments(ctx, files)
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ if err != nil {
+ return err
+ }
+ }
+ switch content := item.(type) {
+ case *issues_model.Issue:
+ content.Attachments, err = repo_model.GetAttachmentsByIssueID(ctx, content.ID)
+ case *issues_model.Comment:
+ content.Attachments, err = repo_model.GetAttachmentsByCommentID(ctx, content.ID)
+ default:
+ return fmt.Errorf("unknown Type: %T", content)
+ }
+ return err
+}
+
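+// attachmentsHTML renders the attachment list template to an HTML fragment for JSON responses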
+func attachmentsHTML(ctx *context.Context, attachments []*repo_model.Attachment, content string) template.HTML {
+ attachHTML, err := ctx.RenderToHTML(tplAttachment, map[string]any{
+ "ctxData": ctx.Data,
+ "Attachments": attachments,
+ "Content": content,
+ })
+ if err != nil {
+ ctx.ServerError("attachmentsHTML.HTMLString", err)
+ return ""
+ }
+ return attachHTML
+}
+
+// combineLabelComments combines nearby label comments into one.
+func combineLabelComments(issue *issues_model.Issue) {
+ var prev, cur *issues_model.Comment
+ for i := 0; i < len(issue.Comments); i++ {
+ cur = issue.Comments[i]
+ if i > 0 {
+ prev = issue.Comments[i-1]
+ }
+ if i == 0 || cur.Type != issues_model.CommentTypeLabel ||
+ (prev != nil && prev.PosterID != cur.PosterID) ||
+ (prev != nil && cur.CreatedUnix-prev.CreatedUnix >= 60) {
+ if cur.Type == issues_model.CommentTypeLabel && cur.Label != nil {
+ if cur.Content != "1" {
+ cur.RemovedLabels = append(cur.RemovedLabels, cur.Label)
+ } else {
+ cur.AddedLabels = append(cur.AddedLabels, cur.Label)
+ }
+ }
+ continue
+ }
+
+ if cur.Label != nil { // now cur MUST be label comment
+ if prev.Type == issues_model.CommentTypeLabel { // we can combine them only if prev is a label comment
+ if cur.Content != "1" {
+ // remove labels from the AddedLabels list if the label that was removed is already
+ // in this list, and if it's not in this list, add the label to RemovedLabels
+ addedAndRemoved := false
+ for i, label := range prev.AddedLabels {
+ if cur.Label.ID == label.ID {
+ prev.AddedLabels = append(prev.AddedLabels[:i], prev.AddedLabels[i+1:]...)
+ addedAndRemoved = true
+ break
+ }
+ }
+ if !addedAndRemoved {
+ prev.RemovedLabels = append(prev.RemovedLabels, cur.Label)
+ }
+ } else {
+ // remove labels from the RemovedLabels list if the label that was added is already
+ // in this list, and if it's not in this list, add the label to AddedLabels
+ removedAndAdded := false
+ for i, label := range prev.RemovedLabels {
+ if cur.Label.ID == label.ID {
+ prev.RemovedLabels = append(prev.RemovedLabels[:i], prev.RemovedLabels[i+1:]...)
+ removedAndAdded = true
+ break
+ }
+ }
+ if !removedAndAdded {
+ prev.AddedLabels = append(prev.AddedLabels, cur.Label)
+ }
+ }
+ prev.CreatedUnix = cur.CreatedUnix
+ // remove the current comment since it has been combined into the prev comment
+ issue.Comments = append(issue.Comments[:i], issue.Comments[i+1:]...)
+ i--
+ } else { // if prev is not a label comment, start a new group
+ if cur.Content != "1" {
+ cur.RemovedLabels = append(cur.RemovedLabels, cur.Label)
+ } else {
+ cur.AddedLabels = append(cur.AddedLabels, cur.Label)
+ }
+ }
+ }
+ }
+}
+
+// get all teams that the current user can mention
+func handleTeamMentions(ctx *context.Context) {
+ if ctx.Doer == nil || !ctx.Repo.Owner.IsOrganization() {
+ return
+ }
+
+ var isAdmin bool
+ var err error
+ var teams []*organization.Team
+ org := organization.OrgFromUser(ctx.Repo.Owner)
+ // Admin has super access.
+ if ctx.Doer.IsAdmin {
+ isAdmin = true
+ } else {
+ isAdmin, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("IsOwnedBy", err)
+ return
+ }
+ }
+
+ if isAdmin {
+ teams, err = org.LoadTeams(ctx)
+ if err != nil {
+ ctx.ServerError("LoadTeams", err)
+ return
+ }
+ } else {
+ teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserTeams", err)
+ return
+ }
+ }
+
+ ctx.Data["MentionableTeams"] = teams
+ ctx.Data["MentionableTeamsOrg"] = ctx.Repo.Owner.Name
+ ctx.Data["MentionableTeamsOrgAvatar"] = ctx.Repo.Owner.AvatarLink(ctx)
+}
+
+type userSearchInfo struct {
+ UserID int64 `json:"user_id"`
+ UserName string `json:"username"`
+ AvatarLink string `json:"avatar_link"`
+ FullName string `json:"full_name"`
+}
+
+type userSearchResponse struct {
+ Results []*userSearchInfo `json:"results"`
+}
+
+// IssuePosters gets the posters for the current repo's issues
+func IssuePosters(ctx *context.Context) {
+ issuePosters(ctx, false)
+}
+
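+// PullPosters gets the posters for the current repo's pull requests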
+func PullPosters(ctx *context.Context) {
+ issuePosters(ctx, true)
+}
+
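+// issuePosters returns the users who have authored issues (or pull requests) in the repository, optionally filtered by a search term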
+func issuePosters(ctx *context.Context, isPullList bool) {
+ repo := ctx.Repo.Repository
+ search := strings.TrimSpace(ctx.FormString("q"))
+ posters, err := repo_model.GetIssuePostersWithSearch(ctx, repo, isPullList, search, setting.UI.DefaultShowFullName)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+
+ if search == "" && ctx.Doer != nil {
+ // the returned posters slice only contains a limited number of users;
+ // to let the current user (doer) quickly filter their own issues, always add the doer to the posters slice
+ if !slices.ContainsFunc(posters, func(user *user_model.User) bool { return user.ID == ctx.Doer.ID }) {
+ posters = append(posters, ctx.Doer)
+ }
+ }
+
+ posters = MakeSelfOnTop(ctx.Doer, posters)
+
+ resp := &userSearchResponse{}
+ resp.Results = make([]*userSearchInfo, len(posters))
+ for i, user := range posters {
+ resp.Results[i] = &userSearchInfo{UserID: user.ID, UserName: user.Name, AvatarLink: user.AvatarLink(ctx)}
+ if setting.UI.DefaultShowFullName {
+ resp.Results[i].FullName = user.FullName
+ }
+ }
+ ctx.JSON(http.StatusOK, resp)
+}
diff --git a/routers/web/repo/issue_content_history.go b/routers/web/repo/issue_content_history.go
new file mode 100644
index 0000000..16b250a
--- /dev/null
+++ b/routers/web/repo/issue_content_history.go
@@ -0,0 +1,237 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "html"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/avatars"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/sergi/go-diff/diffmatchpatch"
+)
+
+// GetContentHistoryOverview gets an overview of the issue's content edit history
+func GetContentHistoryOverview(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ editedHistoryCountMap, _ := issues_model.QueryIssueContentHistoryEditedCountMap(ctx, issue.ID)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "i18n": map[string]any{
+ "textEdited": ctx.Tr("repo.issues.content_history.edited"),
+ "textDeleteFromHistory": ctx.Tr("repo.issues.content_history.delete_from_history"),
+ "textDeleteFromHistoryConfirm": ctx.Tr("repo.issues.content_history.delete_from_history_confirm"),
+ "textOptions": ctx.Tr("repo.issues.content_history.options"),
+ },
+ "editedHistoryCountMap": editedHistoryCountMap,
+ })
+}
+
+// GetContentHistoryList gets the list of content history revisions for an issue or comment
+func GetContentHistoryList(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ commentID := ctx.FormInt64("comment_id")
+ items, _ := issues_model.FetchIssueContentHistoryList(ctx, issue.ID, commentID)
+
+ // render history list to HTML for frontend dropdown items: (name, value)
+ // name is HTML of "avatar + userName + userAction + timeSince"
+ // value is historyId
+ var results []map[string]any
+ for _, item := range items {
+ var actionText string
+ if item.IsDeleted {
+ actionTextDeleted := ctx.Locale.TrString("repo.issues.content_history.deleted")
+ actionText = "<i data-history-is-deleted='1'>" + actionTextDeleted + "</i>"
+ } else if item.IsFirstCreated {
+ actionText = ctx.Locale.TrString("repo.issues.content_history.created")
+ } else {
+ actionText = ctx.Locale.TrString("repo.issues.content_history.edited")
+ }
+
+ username := item.UserName
+ if setting.UI.DefaultShowFullName && strings.TrimSpace(item.UserFullName) != "" {
+ username = strings.TrimSpace(item.UserFullName)
+ }
+
+ src := html.EscapeString(item.UserAvatarLink)
+ class := avatars.DefaultAvatarClass + " tw-mr-2"
+ name := html.EscapeString(username)
+ avatarHTML := string(templates.AvatarHTML(src, 28, class, username))
+ timeSinceText := string(timeutil.TimeSinceUnix(item.EditedUnix, ctx.Locale))
+
+ results = append(results, map[string]any{
+ "name": avatarHTML + "<strong>" + name + "</strong> " + actionText + " " + timeSinceText,
+ "value": item.HistoryID,
+ })
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "results": results,
+ })
+}
+
+// canSoftDeleteContentHistory checks whether the current user can soft-delete a history revision.
+// Admins or owners can always delete history revisions; normal users can only delete their own.
+func canSoftDeleteContentHistory(ctx *context.Context, issue *issues_model.Issue, comment *issues_model.Comment,
+ history *issues_model.ContentHistory,
+) (canSoftDelete bool) {
+ // CanWrite means the doer can manage the issue/PR list
+ if ctx.Repo.IsOwner() || ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
+ canSoftDelete = true
+ } else if ctx.Doer == nil {
+ canSoftDelete = false
+ } else {
+ // read-only users can still post issues or comments,
+ // so they should be able to delete the history related to their own issue/comment. A typical case:
+ // 1. the user posts some sensitive data
+ // 2. the repo owner edits the post but doesn't remove the sensitive data
+ // 3. the poster wants to delete the edited history revision
+ if comment == nil {
+ // the issue poster or the history poster can soft-delete
+ canSoftDelete = ctx.Doer.ID == issue.PosterID || ctx.Doer.ID == history.PosterID
+ canSoftDelete = canSoftDelete && (history.IssueID == issue.ID)
+ } else {
+ // the comment poster or the history poster can soft-delete
+ canSoftDelete = ctx.Doer.ID == comment.PosterID || ctx.Doer.ID == history.PosterID
+ canSoftDelete = canSoftDelete && (history.IssueID == issue.ID)
+ canSoftDelete = canSoftDelete && (history.CommentID == comment.ID)
+ }
+ }
+ return canSoftDelete
+}
+
+// GetContentHistoryDetail gets the detail of a content history revision, including a rendered diff against the previous revision
+func GetContentHistoryDetail(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ historyID := ctx.FormInt64("history_id")
+ history, prevHistory, err := issues_model.GetIssueContentHistoryAndPrev(ctx, issue.ID, historyID)
+ if err != nil {
+ ctx.JSON(http.StatusNotFound, map[string]any{
+ "message": "Can not find the content history",
+ })
+ return
+ }
+
+ // get the related comment if this history revision is for a comment, otherwise the history revision is for an issue.
+ var comment *issues_model.Comment
+ if history.CommentID != 0 {
+ var err error
+ if comment, err = issues_model.GetCommentByID(ctx, history.CommentID); err != nil {
+ log.Error("can not get comment for issue content history %v. err=%v", historyID, err)
+ return
+ }
+ }
+
+ // get the previous history revision (if exists)
+ var prevHistoryID int64
+ var prevHistoryContentText string
+ if prevHistory != nil {
+ prevHistoryID = prevHistory.ID
+ prevHistoryContentText = prevHistory.ContentText
+ }
+
+ // compare the current history revision with the previous one
+ dmp := diffmatchpatch.New()
+ // `checklines=false` produces a better diff result
+ diff := dmp.DiffMain(prevHistoryContentText, history.ContentText, false)
+ diff = dmp.DiffCleanupSemantic(diff)
+ diff = dmp.DiffCleanupEfficiency(diff)
+
+ // use chroma to render the diff html
+ diffHTMLBuf := bytes.Buffer{}
+ diffHTMLBuf.WriteString("<pre class='chroma'>")
+ for _, it := range diff {
+ if it.Type == diffmatchpatch.DiffInsert {
+ diffHTMLBuf.WriteString("<span class='gi'>")
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ diffHTMLBuf.WriteString("</span>")
+ } else if it.Type == diffmatchpatch.DiffDelete {
+ diffHTMLBuf.WriteString("<span class='gd'>")
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ diffHTMLBuf.WriteString("</span>")
+ } else {
+ diffHTMLBuf.WriteString(html.EscapeString(it.Text))
+ }
+ }
+ diffHTMLBuf.WriteString("</pre>")
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "canSoftDelete": canSoftDeleteContentHistory(ctx, issue, comment, history),
+ "historyId": historyID,
+ "prevHistoryId": prevHistoryID,
+ "diffHtml": diffHTMLBuf.String(),
+ })
+}
+
+// SoftDeleteContentHistory soft-deletes a content history revision
+func SoftDeleteContentHistory(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ commentID := ctx.FormInt64("comment_id")
+ historyID := ctx.FormInt64("history_id")
+
+ var comment *issues_model.Comment
+ var history *issues_model.ContentHistory
+ var err error
+
+ if history, err = issues_model.GetIssueContentHistoryByID(ctx, historyID); err != nil {
+ log.Error("can not get issue content history %v. err=%v", historyID, err)
+ return
+ }
+ if history.IssueID != issue.ID {
+ ctx.NotFound("CompareRepoID", issues_model.ErrCommentNotExist{})
+ return
+ }
+ if commentID != 0 {
+ if history.CommentID != commentID {
+ ctx.NotFound("CompareCommentID", issues_model.ErrCommentNotExist{})
+ return
+ }
+
+ if comment, err = issues_model.GetCommentByID(ctx, commentID); err != nil {
+ log.Error("can not get comment for issue content history %v. err=%v", historyID, err)
+ return
+ }
+ if comment.IssueID != issue.ID {
+ ctx.NotFound("CompareIssueID", issues_model.ErrCommentNotExist{})
+ return
+ }
+ }
+
+ canSoftDelete := canSoftDeleteContentHistory(ctx, issue, comment, history)
+ if !canSoftDelete {
+ ctx.JSON(http.StatusForbidden, map[string]any{
+ "message": "Can not delete the content history",
+ })
+ return
+ }
+
+ err = issues_model.SoftDeleteIssueContentHistory(ctx, historyID)
+ log.Debug("soft delete issue content history. issue=%d, comment=%d, history=%d", issue.ID, commentID, historyID)
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": err == nil,
+ })
+}
diff --git a/routers/web/repo/issue_dependency.go b/routers/web/repo/issue_dependency.go
new file mode 100644
index 0000000..66b3868
--- /dev/null
+++ b/routers/web/repo/issue_dependency.go
@@ -0,0 +1,144 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// AddDependency adds a new dependency to an issue
+func AddDependency(ctx *context.Context) {
+ issueIndex := ctx.ParamsInt64("index")
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, issueIndex)
+ if err != nil {
+ ctx.ServerError("GetIssueByIndex", err)
+ return
+ }
+
+ // Check if the Repo is allowed to have dependencies
+ if !ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies")
+ return
+ }
+
+ depID := ctx.FormInt64("newDependency")
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return
+ }
+
+ // Redirect
+ defer ctx.Redirect(issue.Link())
+
+ // Dependency
+ dep, err := issues_model.GetIssueByID(ctx, depID)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_issue_not_exist"))
+ return
+ }
+
+ // Check if both issues are in the same repo when cross-repository dependencies are not enabled
+ if issue.RepoID != dep.RepoID {
+ if !setting.Service.AllowCrossRepositoryDependencies {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_not_same_repo"))
+ return
+ }
+ if err := dep.LoadRepo(ctx); err != nil {
+ ctx.ServerError("loadRepo", err)
+ return
+ }
+ // Can ctx.Doer read issues in the dep repo?
+ depRepoPerm, err := access_model.GetUserRepoPermission(ctx, dep.Repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if !depRepoPerm.CanReadIssuesOrPulls(dep.IsPull) {
+ // you can't see this dependency
+ return
+ }
+ }
+
+ // Check if the issue and the dependency are the same
+ if dep.ID == issue.ID {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_same_issue"))
+ return
+ }
+
+ err = issues_model.CreateIssueDependency(ctx, ctx.Doer, issue, dep)
+ if err != nil {
+ if issues_model.IsErrDependencyExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_exists"))
+ return
+ } else if issues_model.IsErrCircularDependency(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_cannot_create_circular"))
+ return
+ }
+ ctx.ServerError("CreateOrUpdateIssueDependency", err)
+ return
+ }
+}
+
+// RemoveDependency removes the dependency
+func RemoveDependency(ctx *context.Context) {
+ issueIndex := ctx.ParamsInt64("index")
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, issueIndex)
+ if err != nil {
+ ctx.ServerError("GetIssueByIndex", err)
+ return
+ }
+
+ // Check if the Repo is allowed to have dependencies
+ if !ctx.Repo.CanCreateIssueDependencies(ctx, ctx.Doer, issue.IsPull) {
+ ctx.Error(http.StatusForbidden, "CanCreateIssueDependencies")
+ return
+ }
+
+ depID := ctx.FormInt64("removeDependencyID")
+
+ if err = issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return
+ }
+
+ // Dependency Type
+ depTypeStr := ctx.Req.PostFormValue("dependencyType")
+
+ var depType issues_model.DependencyType
+
+ switch depTypeStr {
+ case "blockedBy":
+ depType = issues_model.DependencyTypeBlockedBy
+ case "blocking":
+ depType = issues_model.DependencyTypeBlocking
+ default:
+ ctx.Error(http.StatusBadRequest, "GetDependecyType")
+ return
+ }
+
+ // Dependency
+ dep, err := issues_model.GetIssueByID(ctx, depID)
+ if err != nil {
+ ctx.ServerError("GetIssueByID", err)
+ return
+ }
+
+ if err = issues_model.RemoveIssueDependency(ctx, ctx.Doer, issue, dep, depType); err != nil {
+ if issues_model.IsErrDependencyNotExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.dependency.add_error_dep_not_exist"))
+ return
+ }
+ ctx.ServerError("RemoveIssueDependency", err)
+ return
+ }
+
+ // Redirect
+ ctx.Redirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_label.go b/routers/web/repo/issue_label.go
new file mode 100644
index 0000000..81bee4d
--- /dev/null
+++ b/routers/web/repo/issue_label.go
@@ -0,0 +1,229 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/label"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ issue_service "code.gitea.io/gitea/services/issue"
+)
+
+const (
+ tplLabels base.TplName = "repo/issue/labels"
+)
+
+// Labels renders the repository's issue labels page
+func Labels(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsLabels"] = true
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.HTML(http.StatusOK, tplLabels)
+}
+
+// InitializeLabels initializes labels for a repository from a label template
+func InitializeLabels(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.InitializeLabelsForm)
+ if ctx.HasError() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+
+ if err := repo_module.InitializeLabels(ctx, ctx.Repo.Repository.ID, form.TemplateName, false); err != nil {
+ if label.IsErrTemplateLoad(err) {
+ originalErr := err.(label.ErrTemplateLoad).OriginalError
+ ctx.Flash.Error(ctx.Tr("repo.issues.label_templates.fail_to_load_file", form.TemplateName, originalErr))
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+ ctx.ServerError("InitializeLabels", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// RetrieveLabels finds all the labels of a repository and its organization
+func RetrieveLabels(ctx *context.Context) {
+ labels, err := issues_model.GetLabelsByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("RetrieveLabels.GetLabels", err)
+ return
+ }
+
+ for _, l := range labels {
+ l.CalOpenIssues()
+ }
+
+ ctx.Data["Labels"] = labels
+
+ if ctx.Repo.Owner.IsOrganization() {
+ orgLabels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Repo.Owner.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+ for _, l := range orgLabels {
+ l.CalOpenOrgIssues(ctx, ctx.Repo.Repository.ID, l.ID)
+ }
+ ctx.Data["OrgLabels"] = orgLabels
+
+ org, err := organization.GetOrgByName(ctx, ctx.Repo.Owner.LowerName)
+ if err != nil {
+ ctx.ServerError("GetOrgByName", err)
+ return
+ }
+ if ctx.Doer != nil {
+ ctx.Org.IsOwner, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("org.IsOwnedBy", err)
+ return
+ }
+ ctx.Org.OrgLink = org.AsUser().OrganisationLink()
+ ctx.Data["IsOrganizationOwner"] = ctx.Org.IsOwner
+ ctx.Data["OrganizationLink"] = ctx.Org.OrgLink
+ }
+ }
+ ctx.Data["NumLabels"] = len(labels)
+ ctx.Data["SortType"] = ctx.FormString("sort")
+}
+
+// NewLabel creates a new label for a repository
+func NewLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ ctx.Data["Title"] = ctx.Tr("repo.labels")
+ ctx.Data["PageIsLabels"] = true
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+ return
+ }
+
+ l := &issues_model.Label{
+ RepoID: ctx.Repo.Repository.ID,
+ Name: form.Title,
+ Exclusive: form.Exclusive,
+ Description: form.Description,
+ Color: form.Color,
+ }
+ if err := issues_model.NewLabel(ctx, l); err != nil {
+ ctx.ServerError("NewLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// UpdateLabel updates a label's name, color and other properties
+func UpdateLabel(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateLabelForm)
+ l, err := issues_model.GetLabelInRepoByID(ctx, ctx.Repo.Repository.ID, form.ID)
+ if err != nil {
+ switch {
+ case issues_model.IsErrRepoLabelNotExist(err):
+ ctx.Error(http.StatusNotFound)
+ default:
+ ctx.ServerError("UpdateLabel", err)
+ }
+ return
+ }
+ l.Name = form.Title
+ l.Exclusive = form.Exclusive
+ l.Description = form.Description
+ l.Color = form.Color
+
+ l.SetArchived(form.IsArchived)
+ if err := issues_model.UpdateLabel(ctx, l); err != nil {
+ ctx.ServerError("UpdateLabel", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// DeleteLabel deletes a label
+func DeleteLabel(ctx *context.Context) {
+ if err := issues_model.DeleteLabel(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteLabel: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.issues.label_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/labels")
+}
+
+// UpdateIssueLabel changes the labels of the selected issues
+func UpdateIssueLabel(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ switch action := ctx.FormString("action"); action {
+ case "clear":
+ for _, issue := range issues {
+ if err := issue_service.ClearLabels(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("ClearLabels", err)
+ return
+ }
+ }
+ case "attach", "detach", "toggle", "toggle-alt":
+ label, err := issues_model.GetLabelByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+ if issues_model.IsErrRepoLabelNotExist(err) {
+ ctx.Error(http.StatusNotFound, "GetLabelByID")
+ } else {
+ ctx.ServerError("GetLabelByID", err)
+ }
+ return
+ }
+
+ if action == "toggle" {
+ // detach if any issue already has the label, otherwise attach
+ action = "attach"
+ if label.ExclusiveScope() == "" {
+ for _, issue := range issues {
+ if issues_model.HasIssueLabel(ctx, issue.ID, label.ID) {
+ action = "detach"
+ break
+ }
+ }
+ }
+ } else if action == "toggle-alt" {
+ // always detach with alt key pressed, to be able to remove
+ // scoped labels
+ action = "detach"
+ }
+
+ if action == "attach" {
+ for _, issue := range issues {
+ if err = issue_service.AddLabel(ctx, issue, ctx.Doer, label); err != nil {
+ ctx.ServerError("AddLabel", err)
+ return
+ }
+ }
+ } else {
+ for _, issue := range issues {
+ if err = issue_service.RemoveLabel(ctx, issue, ctx.Doer, label); err != nil {
+ ctx.ServerError("RemoveLabel", err)
+ return
+ }
+ }
+ }
+ default:
+ log.Warn("Unrecognized action: %s", action)
+ ctx.Error(http.StatusInternalServerError)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/repo/issue_label_test.go b/routers/web/repo/issue_label_test.go
new file mode 100644
index 0000000..2b4915e
--- /dev/null
+++ b/routers/web/repo/issue_label_test.go
@@ -0,0 +1,173 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strconv"
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/test"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func int64SliceToCommaSeparated(a []int64) string {
+ s := ""
+ for i, n := range a {
+ if i > 0 {
+ s += ","
+ }
+ s += strconv.Itoa(int(n))
+ }
+ return s
+}
+
+func TestInitializeLabels(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ require.NoError(t, repository.LoadRepoConfig())
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/initialize")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+ web.SetForm(ctx, &forms.InitializeLabelsForm{TemplateName: "Default"})
+ InitializeLabels(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ RepoID: 2,
+ Name: "enhancement",
+ Color: "#84b6eb",
+ })
+ assert.Equal(t, "/user2/repo2/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestRetrieveLabels(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ for _, testCase := range []struct {
+ RepoID int64
+ Sort string
+ ExpectedLabelIDs []int64
+ }{
+ {1, "", []int64{1, 2}},
+ {1, "leastissues", []int64{2, 1}},
+ {2, "", []int64{}},
+ } {
+ ctx, _ := contexttest.MockContext(t, "user/repo/issues")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, testCase.RepoID)
+ ctx.Req.Form.Set("sort", testCase.Sort)
+ RetrieveLabels(ctx)
+ assert.False(t, ctx.Written())
+ labels, ok := ctx.Data["Labels"].([]*issues_model.Label)
+ assert.True(t, ok)
+ if assert.Len(t, labels, len(testCase.ExpectedLabelIDs)) {
+ for i, label := range labels {
+ assert.EqualValues(t, testCase.ExpectedLabelIDs[i], label.ID)
+ }
+ }
+ }
+}
+
+func TestNewLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/edit")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.CreateLabelForm{
+ Title: "newlabel",
+ Color: "#abcdef",
+ })
+ NewLabel(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ Name: "newlabel",
+ Color: "#abcdef",
+ })
+ assert.Equal(t, "/user2/repo1/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestUpdateLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/edit")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.CreateLabelForm{
+ ID: 2,
+ Title: "newnameforlabel",
+ Color: "#abcdef",
+ IsArchived: true,
+ })
+ UpdateLabel(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
+ ID: 2,
+ Name: "newnameforlabel",
+ Color: "#abcdef",
+ })
+ assert.Equal(t, "/user2/repo1/labels", test.RedirectURL(ctx.Resp))
+}
+
+func TestDeleteLabel(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/labels/delete")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("id", "2")
+ DeleteLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ unittest.AssertNotExistsBean(t, &issues_model.Label{ID: 2})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{LabelID: 2})
+ assert.EqualValues(t, ctx.Tr("repo.issues.label_deletion_success"), ctx.Flash.SuccessMsg)
+}
+
+func TestUpdateIssueLabel_Clear(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("issue_ids", "1,3")
+ ctx.Req.Form.Set("action", "clear")
+ UpdateIssueLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 1})
+ unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 3})
+ unittest.CheckConsistencyFor(t, &issues_model.Label{})
+}
+
+func TestUpdateIssueLabel_Toggle(t *testing.T) {
+ for _, testCase := range []struct {
+ Action string
+ IssueIDs []int64
+ LabelID int64
+ ExpectedAdd bool // whether we expect the label to be added to the issues
+ }{
+ {"attach", []int64{1, 3}, 1, true},
+ {"detach", []int64{1, 3}, 1, false},
+ {"toggle", []int64{1, 3}, 1, false},
+ {"toggle", []int64{1, 2}, 2, true},
+ } {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.Req.Form.Set("issue_ids", int64SliceToCommaSeparated(testCase.IssueIDs))
+ ctx.Req.Form.Set("action", testCase.Action)
+ ctx.Req.Form.Set("id", strconv.Itoa(int(testCase.LabelID)))
+ UpdateIssueLabel(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ for _, issueID := range testCase.IssueIDs {
+ unittest.AssertExistsIf(t, testCase.ExpectedAdd, &issues_model.IssueLabel{
+ IssueID: issueID,
+ LabelID: testCase.LabelID,
+ })
+ }
+ unittest.CheckConsistencyFor(t, &issues_model.Label{})
+ }
+}
diff --git a/routers/web/repo/issue_lock.go b/routers/web/repo/issue_lock.go
new file mode 100644
index 0000000..1d5fc8a
--- /dev/null
+++ b/routers/web/repo/issue_lock.go
@@ -0,0 +1,65 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// LockIssue locks an issue. This would limit commenting abilities to
+// users with write access to the repo.
+func LockIssue(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.IssueLockForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if issue.IsLocked {
+ ctx.JSONError(ctx.Tr("repo.issues.lock_duplicate"))
+ return
+ }
+
+ if !form.HasValidReason() {
+ ctx.JSONError(ctx.Tr("repo.issues.lock.unknown_reason"))
+ return
+ }
+
+ if err := issues_model.LockIssue(ctx, &issues_model.IssueLockOptions{
+ Doer: ctx.Doer,
+ Issue: issue,
+ Reason: form.Reason,
+ }); err != nil {
+ ctx.ServerError("LockIssue", err)
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// UnlockIssue unlocks a previously locked issue.
+func UnlockIssue(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if !issue.IsLocked {
+ ctx.JSONError(ctx.Tr("repo.issues.unlock_error"))
+ return
+ }
+
+ if err := issues_model.UnlockIssue(ctx, &issues_model.IssueLockOptions{
+ Doer: ctx.Doer,
+ Issue: issue,
+ }); err != nil {
+ ctx.ServerError("UnlockIssue", err)
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_pin.go b/routers/web/repo/issue_pin.go
new file mode 100644
index 0000000..365c812
--- /dev/null
+++ b/routers/web/repo/issue_pin.go
@@ -0,0 +1,107 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+// IssuePinOrUnpin pins or unpins an issue
+func IssuePinOrUnpin(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
+	// The repository must be loaded first, otherwise adding the pin event to the comment history will crash.
+ err := issue.LoadRepo(ctx)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ err = issue.PinOrUnpin(ctx, ctx.Doer)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// IssueUnpin unpins an issue
+func IssueUnpin(ctx *context.Context) {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+	// The repository must be loaded first, otherwise adding the pin event to the comment history will crash.
+ err = issue.LoadRepo(ctx)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ err = issue.Unpin(ctx, ctx.Doer)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
+
+// IssuePinMove moves a pinned Issue
+func IssuePinMove(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, "Only signed in users are allowed to perform this action.")
+ return
+ }
+
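+	// The request body is a small JSON document carrying the issue ID and its new pin position.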
+ type movePinIssueForm struct {
+ ID int64 `json:"id"`
+ Position int `json:"position"`
+ }
+
+ form := &movePinIssueForm{}
+ if err := json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ issue, err := issues_model.GetIssueByID(ctx, form.ID)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ if issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.Status(http.StatusNotFound)
+ log.Error("Issue does not belong to this repository")
+ return
+ }
+
+ err = issue.MovePin(ctx, form.Position)
+ if err != nil {
+ ctx.Status(http.StatusInternalServerError)
+ log.Error(err.Error())
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
diff --git a/routers/web/repo/issue_stopwatch.go b/routers/web/repo/issue_stopwatch.go
new file mode 100644
index 0000000..70d42b2
--- /dev/null
+++ b/routers/web/repo/issue_stopwatch.go
@@ -0,0 +1,113 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/eventsource"
+ "code.gitea.io/gitea/services/context"
+)
+
+// IssueStopwatch creates or stops a stopwatch for the given issue.
+func IssueStopwatch(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+
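+	// Only flash the auto-close hint when a stopwatch is being started, not when an existing one is stopped.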
+ var showSuccessMessage bool
+
+ if !issues_model.StopwatchExists(c, c.Doer.ID, issue.ID) {
+ showSuccessMessage = true
+ }
+
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ if err := issues_model.CreateOrStopIssueStopwatch(c, c.Doer, issue); err != nil {
+ c.ServerError("CreateOrStopIssueStopwatch", err)
+ return
+ }
+
+ if showSuccessMessage {
+ c.Flash.Success(c.Tr("repo.issues.tracker_auto_close"))
+ }
+
+ url := issue.Link()
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// CancelStopwatch cancels the stopwatch
+func CancelStopwatch(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ if err := issues_model.CancelStopwatch(c, c.Doer, issue); err != nil {
+ c.ServerError("CancelStopwatch", err)
+ return
+ }
+
+ stopwatches, err := issues_model.GetUserStopwatches(c, c.Doer.ID, db.ListOptions{})
+ if err != nil {
+ c.ServerError("GetUserStopwatches", err)
+ return
+ }
+ if len(stopwatches) == 0 {
+ eventsource.GetManager().SendMessage(c.Doer.ID, &eventsource.Event{
+ Name: "stopwatches",
+ Data: "{}",
+ })
+ }
+
+ url := issue.Link()
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// GetActiveStopwatch is the middleware that sets .ActiveStopwatch on context
+func GetActiveStopwatch(ctx *context.Context) {
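+	// The active stopwatch is only rendered in the web UI, so skip API requests and anonymous users.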
+ if strings.HasPrefix(ctx.Req.URL.Path, "/api") {
+ return
+ }
+
+ if !ctx.IsSigned {
+ return
+ }
+
+ _, sw, issue, err := issues_model.HasUserStopwatch(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("HasUserStopwatch", err)
+ return
+ }
+
+ if sw == nil || sw.ID == 0 {
+ return
+ }
+
+ ctx.Data["ActiveStopwatch"] = StopwatchTmplInfo{
+ issue.Link(),
+ issue.Repo.FullName(),
+ issue.Index,
+ sw.Seconds() + 1, // ensure time is never zero in ui
+ }
+}
+
+// StopwatchTmplInfo is a view on a stopwatch specifically for template rendering
+type StopwatchTmplInfo struct {
+ IssueLink string
+ RepoSlug string
+ IssueIndex int64
+ Seconds int64
+}
diff --git a/routers/web/repo/issue_test.go b/routers/web/repo/issue_test.go
new file mode 100644
index 0000000..f1d0aac
--- /dev/null
+++ b/routers/web/repo/issue_test.go
@@ -0,0 +1,375 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCombineLabelComments(t *testing.T) {
+ kases := []struct {
+ name string
+ beforeCombined []*issues_model.Comment
+ afterCombined []*issues_model.Comment
+ }{
+ {
+ name: "kase 1",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{},
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 2",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 70,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ CreatedUnix: 70,
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 3",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 2,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 0,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 2,
+ Content: "",
+ CreatedUnix: 0,
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 1,
+ Content: "test",
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 4",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/backport",
+ },
+ CreatedUnix: 10,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ CreatedUnix: 10,
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ {
+ Name: "kind/backport",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ },
+ },
+ },
+ {
+ name: "kase 5",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 2,
+ Content: "testtest",
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeComment,
+ PosterID: 2,
+ Content: "testtest",
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ RemovedLabels: []*issues_model.Label{
+ {
+ Name: "kind/bug",
+ },
+ },
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ },
+ {
+ name: "kase 6",
+ beforeCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "reviewed/confirmed",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ CreatedUnix: 0,
+ },
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/feature",
+ },
+ CreatedUnix: 0,
+ },
+ },
+ afterCombined: []*issues_model.Comment{
+ {
+ Type: issues_model.CommentTypeLabel,
+ PosterID: 1,
+ Content: "1",
+ Label: &issues_model.Label{
+ Name: "kind/bug",
+ },
+ AddedLabels: []*issues_model.Label{
+ {
+ Name: "reviewed/confirmed",
+ },
+ {
+ Name: "kind/feature",
+ },
+ },
+ CreatedUnix: 0,
+ },
+ },
+ },
+ }
+
+ for _, kase := range kases {
+ t.Run(kase.name, func(t *testing.T) {
+ issue := issues_model.Issue{
+ Comments: kase.beforeCombined,
+ }
+ combineLabelComments(&issue)
+ assert.EqualValues(t, kase.afterCombined, issue.Comments)
+ })
+ }
+}
diff --git a/routers/web/repo/issue_timetrack.go b/routers/web/repo/issue_timetrack.go
new file mode 100644
index 0000000..241e434
--- /dev/null
+++ b/routers/web/repo/issue_timetrack.go
@@ -0,0 +1,87 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// AddTimeManually tracks time manually
+func AddTimeManually(c *context.Context) {
+ form := web.GetForm(c).(*forms.AddTimeManuallyForm)
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+ url := issue.Link()
+
+ if c.HasError() {
+ c.Flash.Error(c.GetErrMsg())
+ c.Redirect(url)
+ return
+ }
+
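+	// Combine the submitted hours and minutes into a single duration; non-positive totals are rejected below.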
+ total := time.Duration(form.Hours)*time.Hour + time.Duration(form.Minutes)*time.Minute
+
+ if total <= 0 {
+ c.Flash.Error(c.Tr("repo.issues.add_time_sum_to_small"))
+ c.Redirect(url, http.StatusSeeOther)
+ return
+ }
+
+ if _, err := issues_model.AddTime(c, c.Doer, issue, int64(total.Seconds()), time.Now()); err != nil {
+ c.ServerError("AddTime", err)
+ return
+ }
+
+ c.Redirect(url, http.StatusSeeOther)
+}
+
+// DeleteTime deletes tracked time
+func DeleteTime(c *context.Context) {
+ issue := GetActionIssue(c)
+ if c.Written() {
+ return
+ }
+ if !c.Repo.CanUseTimetracker(c, issue, c.Doer) {
+ c.NotFound("CanUseTimetracker", nil)
+ return
+ }
+
+ t, err := issues_model.GetTrackedTimeByID(c, c.ParamsInt64(":timeid"))
+ if err != nil {
+ if db.IsErrNotExist(err) {
+ c.NotFound("time not found", err)
+ return
+ }
+ c.Error(http.StatusInternalServerError, "GetTrackedTimeByID", err.Error())
+ return
+ }
+
+	// Only the user who tracked the time or a site admin may delete it
+ if !c.IsSigned || (!c.IsUserSiteAdmin() && c.Doer.ID != t.UserID) {
+ c.Error(http.StatusForbidden, "not allowed")
+ return
+ }
+
+ if err = issues_model.DeleteTime(c, t); err != nil {
+ c.ServerError("DeleteTime", err)
+ return
+ }
+
+ c.Flash.Success(c.Tr("repo.issues.del_time_history", util.SecToTime(t.Time)))
+ c.Redirect(issue.Link())
+}
diff --git a/routers/web/repo/issue_watch.go b/routers/web/repo/issue_watch.go
new file mode 100644
index 0000000..5cff9f4
--- /dev/null
+++ b/routers/web/repo/issue_watch.go
@@ -0,0 +1,63 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strconv"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplWatching base.TplName = "repo/issue/view_content/sidebar/watching"
+)
+
+// IssueWatch sets issue watching
+func IssueWatch(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+
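+	// Only signed-in users who are the issue poster or can read issues/pulls may change the watch state.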
+ if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
+ if log.IsTrace() {
+ if ctx.IsSigned {
+ issueType := "issues"
+ if issue.IsPull {
+ issueType = "pulls"
+ }
+ log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
+ "User in Repo has Permissions: %-+v",
+ ctx.Doer,
+ issue.PosterID,
+ issueType,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ } else {
+ log.Trace("Permission Denied: Not logged in")
+ }
+ }
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ watch, err := strconv.ParseBool(ctx.Req.PostFormValue("watch"))
+ if err != nil {
+ ctx.ServerError("watch is not bool", err)
+ return
+ }
+
+ if err := issues_model.CreateOrUpdateIssueWatch(ctx, ctx.Doer.ID, issue.ID, watch); err != nil {
+ ctx.ServerError("CreateOrUpdateIssueWatch", err)
+ return
+ }
+
+ ctx.Data["Issue"] = issue
+ ctx.Data["IssueWatch"] = &issues_model.IssueWatch{IsWatching: watch}
+ ctx.HTML(http.StatusOK, tplWatching)
+}
diff --git a/routers/web/repo/main_test.go b/routers/web/repo/main_test.go
new file mode 100644
index 0000000..6e469cf
--- /dev/null
+++ b/routers/web/repo/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/repo/middlewares.go b/routers/web/repo/middlewares.go
new file mode 100644
index 0000000..ddda9f3
--- /dev/null
+++ b/routers/web/repo/middlewares.go
@@ -0,0 +1,120 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "strconv"
+
+ system_model "code.gitea.io/gitea/models/system"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/services/context"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+// SetEditorconfigIfExists sets the editorconfig as a render variable
+func SetEditorconfigIfExists(ctx *context.Context) {
+ if ctx.Repo.Repository.IsEmpty {
+ return
+ }
+
+ ec, _, err := ctx.Repo.GetEditorconfig()
+
+ if err != nil && !git.IsErrNotExist(err) {
+ description := fmt.Sprintf("Error while getting .editorconfig file: %v", err)
+ if err := system_model.CreateRepositoryNotice(description); err != nil {
+			ctx.ServerError("ErrCreatingRepositoryNotice", err)
+ }
+ return
+ }
+
+ ctx.Data["Editorconfig"] = ec
+}
+
+// SetDiffViewStyle sets the diff view style as a render variable
+func SetDiffViewStyle(ctx *context.Context) {
+ queryStyle := ctx.FormString("style")
+
+ if !ctx.IsSigned {
+ ctx.Data["IsSplitStyle"] = queryStyle == "split"
+ return
+ }
+
+ var (
+ userStyle = ctx.Doer.DiffViewStyle
+ style string
+ )
+
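+	// Prefer an explicit ?style= query value, then the user's stored preference, and fall back to unified.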
+ if queryStyle == "unified" || queryStyle == "split" {
+ style = queryStyle
+ } else if userStyle == "unified" || userStyle == "split" {
+ style = userStyle
+ } else {
+ style = "unified"
+ }
+
+ ctx.Data["IsSplitStyle"] = style == "split"
+
+ opts := &user_service.UpdateOptions{
+ DiffViewStyle: optional.Some(style),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ }
+}
+
+// SetWhitespaceBehavior sets the whitespace behavior as a render variable
+func SetWhitespaceBehavior(ctx *context.Context) {
+ const defaultWhitespaceBehavior = "show-all"
+ whitespaceBehavior := ctx.FormString("whitespace")
+ switch whitespaceBehavior {
+ case "", "ignore-all", "ignore-eol", "ignore-change":
+ break
+ default:
+ whitespaceBehavior = defaultWhitespaceBehavior
+ }
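+	// For signed-in users, fall back to the stored preference when nothing valid was submitted, and persist a newly submitted value.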
+ if ctx.IsSigned {
+ userWhitespaceBehavior, err := user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, defaultWhitespaceBehavior)
+ if err == nil {
+ if whitespaceBehavior == "" {
+ whitespaceBehavior = userWhitespaceBehavior
+ } else if whitespaceBehavior != userWhitespaceBehavior {
+ _ = user_model.SetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyDiffWhitespaceBehavior, whitespaceBehavior)
+ }
+ } // else: we can ignore the error safely
+ }
+
+ // these behaviors are for gitdiff.GetWhitespaceFlag
+ if whitespaceBehavior == "" {
+ ctx.Data["WhitespaceBehavior"] = defaultWhitespaceBehavior
+ } else {
+ ctx.Data["WhitespaceBehavior"] = whitespaceBehavior
+ }
+}
+
+// SetShowOutdatedComments sets the show-outdated-comments option as a context variable
+func SetShowOutdatedComments(ctx *context.Context) {
+ showOutdatedCommentsValue := ctx.FormString("show-outdated")
+
+ if showOutdatedCommentsValue != "true" && showOutdatedCommentsValue != "false" {
+ // invalid or no value for this form string -> use default or stored user setting
+ if ctx.IsSigned {
+ showOutdatedCommentsValue, _ = user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyShowOutdatedComments, "false")
+ } else {
+ // not logged in user -> use the default value
+ showOutdatedCommentsValue = "false"
+ }
+ } else {
+ // valid value -> update user setting if user is logged in
+ if ctx.IsSigned {
+ _ = user_model.SetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyShowOutdatedComments, showOutdatedCommentsValue)
+ }
+ }
+
+ showOutdatedComments, _ := strconv.ParseBool(showOutdatedCommentsValue)
+ ctx.Data["ShowOutdatedComments"] = showOutdatedComments
+}
diff --git a/routers/web/repo/migrate.go b/routers/web/repo/migrate.go
new file mode 100644
index 0000000..0acf966
--- /dev/null
+++ b/routers/web/repo/migrate.go
@@ -0,0 +1,310 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "net/url"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ admin_model "code.gitea.io/gitea/models/admin"
+ "code.gitea.io/gitea/models/db"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/migrations"
+ "code.gitea.io/gitea/services/task"
+)
+
+const (
+ tplMigrate base.TplName = "repo/migrate/migrate"
+)
+
+// Migrate renders the repository migration page
+func Migrate(ctx *context.Context) {
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "Migrate: the site administrator has disabled migrations")
+ return
+ }
+
+ serviceType := structs.GitServiceType(ctx.FormInt("service_type"))
+
+ setMigrationContextData(ctx, serviceType)
+
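+	// Without a concrete service type, render the generic migration page where the user picks a source.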
+ if serviceType == 0 {
+ ctx.Data["Org"] = ctx.FormString("org")
+ ctx.Data["Mirror"] = ctx.FormString("mirror")
+
+ ctx.HTML(http.StatusOK, tplMigrate)
+ return
+ }
+
+ ctx.Data["private"] = getRepoPrivate(ctx)
+ ctx.Data["mirror"] = ctx.FormString("mirror") == "1"
+ ctx.Data["lfs"] = ctx.FormString("lfs") == "1"
+ ctx.Data["wiki"] = ctx.FormString("wiki") == "1"
+ ctx.Data["milestones"] = ctx.FormString("milestones") == "1"
+ ctx.Data["labels"] = ctx.FormString("labels") == "1"
+ ctx.Data["issues"] = ctx.FormString("issues") == "1"
+ ctx.Data["pull_requests"] = ctx.FormString("pull_requests") == "1"
+ ctx.Data["releases"] = ctx.FormString("releases") == "1"
+
+ ctxUser := checkContextUser(ctx, ctx.FormInt64("org"))
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ ctx.HTML(http.StatusOK, base.TplName("repo/migrate/"+serviceType.Name()))
+}
+
+func handleMigrateError(ctx *context.Context, owner *user_model.User, err error, name string, tpl base.TplName, form *forms.MigrateRepoForm) {
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "MigrateError: the site administrator has disabled migrations")
+ return
+ }
+
+ switch {
+ case migrations.IsRateLimitError(err):
+ ctx.RenderWithErr(ctx.Tr("form.visit_rate_limit"), tpl, form)
+ case migrations.IsTwoFactorAuthError(err):
+ ctx.RenderWithErr(ctx.Tr("form.2fa_auth_required"), tpl, form)
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tpl, form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tpl, form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tpl, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tpl, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tpl, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tpl, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tpl, form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form)
+ default:
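+		// Unrecognized error: sanitize credentials embedded in URLs before matching on the error text.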
+ err = util.SanitizeErrorCredentialURLs(err)
+ if strings.Contains(err.Error(), "Authentication failed") ||
+ strings.Contains(err.Error(), "Bad credentials") ||
+ strings.Contains(err.Error(), "could not read Username") {
+ ctx.Data["Err_Auth"] = true
+ ctx.RenderWithErr(ctx.Tr("form.auth_failed", err.Error()), tpl, form)
+ } else if strings.Contains(err.Error(), "fatal:") {
+ ctx.Data["Err_CloneAddr"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.failed", err.Error()), tpl, form)
+ } else {
+ ctx.ServerError(name, err)
+ }
+ }
+}
+
+func handleMigrateRemoteAddrError(ctx *context.Context, err error, tpl base.TplName, form *forms.MigrateRepoForm) {
+ if models.IsErrInvalidCloneAddr(err) {
+ addrErr := err.(*models.ErrInvalidCloneAddr)
+ switch {
+ case addrErr.IsProtocolInvalid:
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_protocol_invalid"), tpl, form)
+ case addrErr.IsURLError:
+ ctx.RenderWithErr(ctx.Tr("form.url_error", addrErr.Host), tpl, form)
+ case addrErr.IsPermissionDenied:
+ if addrErr.LocalPath {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied"), tpl, form)
+ } else {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied_blocked"), tpl, form)
+ }
+ case addrErr.IsInvalidPath:
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_local_path"), tpl, form)
+ default:
+ log.Error("Error whilst updating url: %v", err)
+ ctx.RenderWithErr(ctx.Tr("form.url_error", "unknown"), tpl, form)
+ }
+ } else {
+ log.Error("Error whilst updating url: %v", err)
+ ctx.RenderWithErr(ctx.Tr("form.url_error", "unknown"), tpl, form)
+ }
+}
+
+// MigratePost response for migrating from an external git repository
+func MigratePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.MigrateRepoForm)
+ if setting.Repository.DisableMigrations {
+ ctx.Error(http.StatusForbidden, "MigratePost: the site administrator has disabled migrations")
+ return
+ }
+
+ if form.Mirror && setting.Mirror.DisableNewPull {
+ ctx.Error(http.StatusBadRequest, "MigratePost: the site administrator has disabled creation of new mirrors")
+ return
+ }
+
+ setMigrationContextData(ctx, form.Service)
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ tpl := base.TplName("repo/migrate/" + form.Service.Name())
+
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tpl)
+ return
+ }
+
+ remoteAddr, err := forms.ParseRemoteAddr(form.CloneAddr, form.AuthUsername, form.AuthPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(remoteAddr, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_CloneAddr"] = true
+ handleMigrateRemoteAddrError(ctx, err, tpl, form)
+ return
+ }
+
+ form.LFS = form.LFS && setting.LFS.StartServer
+
+ if form.LFS && len(form.LFSEndpoint) > 0 {
+ ep := lfs.DetermineEndpoint("", form.LFSEndpoint)
+ if ep == nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tpl, &form)
+ return
+ }
+ err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer)
+ if err != nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ handleMigrateRemoteAddrError(ctx, err, tpl, form)
+ return
+ }
+ }
+
+ opts := migrations.MigrateOptions{
+ OriginalURL: form.CloneAddr,
+ GitServiceType: form.Service,
+ CloneAddr: remoteAddr,
+ RepoName: form.RepoName,
+ Description: form.Description,
+ Private: form.Private || setting.Repository.ForcePrivate,
+ Mirror: form.Mirror,
+ LFS: form.LFS,
+ LFSEndpoint: form.LFSEndpoint,
+ AuthUsername: form.AuthUsername,
+ AuthPassword: form.AuthPassword,
+ AuthToken: form.AuthToken,
+ Wiki: form.Wiki,
+ Issues: form.Issues,
+ Milestones: form.Milestones,
+ Labels: form.Labels,
+ Comments: form.Issues || form.PullRequests,
+ PullRequests: form.PullRequests,
+ Releases: form.Releases,
+ }
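+	// A pull mirror only syncs git data, so drop all other migration items.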
+ if opts.Mirror {
+ opts.Issues = false
+ opts.Milestones = false
+ opts.Labels = false
+ opts.Comments = false
+ opts.PullRequests = false
+ opts.Releases = false
+ }
+
+ err = repo_model.CheckCreateRepository(ctx, ctx.Doer, ctxUser, opts.RepoName, false)
+ if err != nil {
+ handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form)
+ return
+ }
+
+ err = task.MigrateRepository(ctx, ctx.Doer, ctxUser, opts)
+ if err == nil {
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(opts.RepoName))
+ return
+ }
+
+ handleMigrateError(ctx, ctxUser, err, "MigratePost", tpl, form)
+}
+
+func setMigrationContextData(ctx *context.Context, serviceType structs.GitServiceType) {
+ ctx.Data["Title"] = ctx.Tr("new_migrate.title")
+
+ ctx.Data["LFSActive"] = setting.LFS.StartServer
+ ctx.Data["IsForcedPrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+
+ // Plain git should be first
+ ctx.Data["Services"] = append([]structs.GitServiceType{structs.PlainGitService}, structs.SupportedFullGitService...)
+ ctx.Data["service"] = serviceType
+}
+
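+// MigrateRetryPost retries a failed migration task after re-checking the owner's quota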
+func MigrateRetryPost(ctx *context.Context) {
+ ok, err := quota_model.EvaluateForUser(ctx, ctx.Repo.Repository.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ log.Error("quota_model.EvaluateForUser: %v", err)
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
+ if !ok {
+ if err := task.SetMigrateTaskMessage(ctx, ctx.Repo.Repository.ID, ctx.Locale.TrString("repo.settings.pull_mirror_sync_quota_exceeded")); err != nil {
+ log.Error("SetMigrateTaskMessage failed: %v", err)
+ ctx.ServerError("task.SetMigrateTaskMessage", err)
+ return
+ }
+ ctx.JSON(http.StatusRequestEntityTooLarge, map[string]any{
+ "ok": false,
+ "error": ctx.Tr("repo.settings.pull_mirror_sync_quota_exceeded"),
+ })
+ return
+ }
+
+ if err := task.RetryMigrateTask(ctx, ctx.Repo.Repository.ID); err != nil {
+ log.Error("Retry task failed: %v", err)
+ ctx.ServerError("task.RetryMigrateTask", err)
+ return
+ }
+ ctx.JSONOK()
+}
+
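+// MigrateCancelPost cancels a running migration task and redirects back to the repository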
+func MigrateCancelPost(ctx *context.Context) {
+ migratingTask, err := admin_model.GetMigratingTask(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ log.Error("GetMigratingTask: %v", err)
+ ctx.Redirect(ctx.Repo.Repository.Link())
+ return
+ }
+ if migratingTask.Status == structs.TaskStatusRunning {
+ taskUpdate := &admin_model.Task{ID: migratingTask.ID, Status: structs.TaskStatusFailed, Message: "canceled"}
+ if err = taskUpdate.UpdateCols(ctx, "status", "message"); err != nil {
+ ctx.ServerError("task.UpdateCols", err)
+ return
+ }
+ }
+ ctx.Redirect(ctx.Repo.Repository.Link())
+}
diff --git a/routers/web/repo/milestone.go b/routers/web/repo/milestone.go
new file mode 100644
index 0000000..1c53f73
--- /dev/null
+++ b/routers/web/repo/milestone.go
@@ -0,0 +1,304 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/issue"
+
+ "xorm.io/builder"
+)
+
+const (
+ tplMilestone base.TplName = "repo/issue/milestones"
+ tplMilestoneNew base.TplName = "repo/issue/milestone_new"
+ tplMilestoneIssues base.TplName = "repo/issue/milestone_issues"
+)
+
+// Milestones renders the milestones page
+func Milestones(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+
+ isShowClosed := ctx.FormString("state") == "closed"
+ sortType := ctx.FormString("sort")
+ keyword := ctx.FormTrim("q")
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ miles, total, err := db.FindAndCount[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoID: ctx.Repo.Repository.ID,
+ IsClosed: optional.Some(isShowClosed),
+ SortType: sortType,
+ Name: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("GetMilestones", err)
+ return
+ }
+
+ stats, err := issues_model.GetMilestonesStatsByRepoCondAndKw(ctx, builder.And(builder.Eq{"id": ctx.Repo.Repository.ID}), keyword)
+ if err != nil {
+ ctx.ServerError("GetMilestoneStats", err)
+ return
+ }
+ ctx.Data["OpenCount"] = stats.OpenCount
+ ctx.Data["ClosedCount"] = stats.ClosedCount
+ linkStr := "%s/milestones?state=%s&q=%s&sort=%s"
+ ctx.Data["OpenLink"] = fmt.Sprintf(linkStr, ctx.Repo.RepoLink, "open",
+ url.QueryEscape(keyword), url.QueryEscape(sortType))
+ ctx.Data["ClosedLink"] = fmt.Sprintf(linkStr, ctx.Repo.RepoLink, "closed",
+ url.QueryEscape(keyword), url.QueryEscape(sortType))
+
+ if ctx.Repo.Repository.IsTimetrackerEnabled(ctx) {
+ if err := issues_model.MilestoneList(miles).LoadTotalTrackedTimes(ctx); err != nil {
+ ctx.ServerError("LoadTotalTrackedTimes", err)
+ return
+ }
+ }
+ for _, m := range miles {
+ m.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, m.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ }
+ ctx.Data["Milestones"] = miles
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ ctx.Data["SortType"] = sortType
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["IsShowClosed"] = isShowClosed
+
+ pager := context.NewPagination(int(total), setting.UI.IssuePagingNum, page, 5)
+ pager.AddParam(ctx, "state", "State")
+ pager.AddParam(ctx, "q", "Keyword")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplMilestone)
+}
+
+// NewMilestone renders the milestone creation page
+func NewMilestone(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+}
+
+// NewMilestonePost response for creating a milestone
+func NewMilestonePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateMilestoneForm)
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.new")
+ ctx.Data["PageIsIssueList"] = true
+ ctx.Data["PageIsMilestones"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+ return
+ }
+
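+	// An empty deadline means no due date; fall back to a far-future placeholder.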
+ if len(form.Deadline) == 0 {
+ form.Deadline = "9999-12-31"
+ }
+ deadline, err := time.ParseInLocation("2006-01-02", form.Deadline, time.Local)
+ if err != nil {
+ ctx.Data["Err_Deadline"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.milestones.invalid_due_date_format"), tplMilestoneNew, &form)
+ return
+ }
+
+ deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location())
+ if err = issues_model.NewMilestone(ctx, &issues_model.Milestone{
+ RepoID: ctx.Repo.Repository.ID,
+ Name: form.Title,
+ Content: form.Content,
+ DeadlineUnix: timeutil.TimeStamp(deadline.Unix()),
+ }); err != nil {
+ ctx.ServerError("NewMilestone", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.milestones.create_success", form.Title))
+ ctx.Redirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// EditMilestone renders the milestone editing page
+func EditMilestone(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.edit")
+ ctx.Data["PageIsMilestones"] = true
+ ctx.Data["PageIsEditMilestone"] = true
+
+ m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ }
+ return
+ }
+ ctx.Data["title"] = m.Name
+ ctx.Data["content"] = m.Content
+ if len(m.DeadlineString) > 0 {
+ ctx.Data["deadline"] = m.DeadlineString
+ }
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+}
+
+// EditMilestonePost response for editing a milestone
+func EditMilestonePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateMilestoneForm)
+ ctx.Data["Title"] = ctx.Tr("repo.milestones.edit")
+ ctx.Data["PageIsMilestones"] = true
+ ctx.Data["PageIsEditMilestone"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplMilestoneNew)
+ return
+ }
+
+ if len(form.Deadline) == 0 {
+ form.Deadline = "9999-12-31"
+ }
+ deadline, err := time.ParseInLocation("2006-01-02", form.Deadline, time.Local)
+ if err != nil {
+ ctx.Data["Err_Deadline"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.milestones.invalid_due_date_format"), tplMilestoneNew, &form)
+ return
+ }
+
+ deadline = time.Date(deadline.Year(), deadline.Month(), deadline.Day(), 23, 59, 59, 0, deadline.Location())
+ m, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetMilestoneByRepoID", err)
+ }
+ return
+ }
+ m.Name = form.Title
+ m.Content = form.Content
+ m.DeadlineUnix = timeutil.TimeStamp(deadline.Unix())
+ if err = issues_model.UpdateMilestone(ctx, m, m.IsClosed); err != nil {
+ ctx.ServerError("UpdateMilestone", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.milestones.edit_success", m.Name))
+ ctx.Redirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// ChangeMilestoneStatus response for changing a milestone's status
+func ChangeMilestoneStatus(ctx *context.Context) {
+ var toClose bool
+ switch ctx.Params(":action") {
+ case "open":
+ toClose = false
+ case "close":
+ toClose = true
+ default:
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones")
+ return
+ }
+ id := ctx.ParamsInt64(":id")
+
+ if err := issues_model.ChangeMilestoneStatusByRepoIDAndID(ctx, ctx.Repo.Repository.ID, id, toClose); err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("", err)
+ } else {
+ ctx.ServerError("ChangeMilestoneStatusByIDAndRepoID", err)
+ }
+ return
+ }
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones?state=" + url.QueryEscape(ctx.Params(":action")))
+}
+
+// DeleteMilestone deletes a milestone
+func DeleteMilestone(ctx *context.Context) {
+ if err := issues_model.DeleteMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteMilestoneByRepoID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.milestones.deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/milestones")
+}
+
+// MilestoneIssuesAndPulls lists all the issues and pull requests of the milestone
+func MilestoneIssuesAndPulls(ctx *context.Context) {
+ milestoneID := ctx.ParamsInt64(":id")
+ projectID := ctx.FormInt64("project")
+ milestone, err := issues_model.GetMilestoneByRepoID(ctx, ctx.Repo.Repository.ID, milestoneID)
+ if err != nil {
+ if issues_model.IsErrMilestoneNotExist(err) {
+ ctx.NotFound("GetMilestoneByID", err)
+ return
+ }
+
+ ctx.ServerError("GetMilestoneByID", err)
+ return
+ }
+
+ milestone.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, milestone.Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.Data["Title"] = milestone.Name
+ ctx.Data["Milestone"] = milestone
+
+ issues(ctx, milestoneID, projectID, optional.None[bool]())
+
+ ret, _ := issue.GetTemplatesFromDefaultBranch(ctx.Repo.Repository, ctx.Repo.GitRepo)
+ ctx.Data["NewIssueChooseTemplate"] = len(ret) > 0
+
+ ctx.Data["CanWriteIssues"] = ctx.Repo.CanWriteIssuesOrPulls(false)
+ ctx.Data["CanWritePulls"] = ctx.Repo.CanWriteIssuesOrPulls(true)
+
+ ctx.HTML(http.StatusOK, tplMilestoneIssues)
+}
diff --git a/routers/web/repo/packages.go b/routers/web/repo/packages.go
new file mode 100644
index 0000000..11874ab
--- /dev/null
+++ b/routers/web/repo/packages.go
@@ -0,0 +1,78 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplPackagesList base.TplName = "repo/packages"
+)
+
+// Packages displays a list of all packages in the repository
+func Packages(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ query := ctx.FormTrim("q")
+ packageType := ctx.FormTrim("type")
+
+ pvs, total, err := packages.SearchLatestVersions(ctx, &packages.PackageSearchOptions{
+ Paginator: &db.ListOptions{
+ PageSize: setting.UI.PackagesPagingNum,
+ Page: page,
+ },
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ Type: packages.Type(packageType),
+ Name: packages.SearchValue{Value: query},
+ IsInternal: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("SearchLatestVersions", err)
+ return
+ }
+
+ pds, err := packages.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptors", err)
+ return
+ }
+
+ hasPackages, err := packages.HasRepositoryPackages(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("HasRepositoryPackages", err)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["Query"] = query
+ ctx.Data["PackageType"] = packageType
+ ctx.Data["AvailableTypes"] = packages.TypeList
+ ctx.Data["HasPackages"] = hasPackages
+ if ctx.Repo != nil {
+ ctx.Data["CanWritePackages"] = ctx.IsUserRepoWriter([]unit.Type{unit.TypePackages}) || ctx.IsUserSiteAdmin()
+ }
+ ctx.Data["PackageDescriptors"] = pds
+ ctx.Data["Total"] = total
+ ctx.Data["RepositoryAccessMap"] = map[int64]bool{ctx.Repo.Repository.ID: true} // There is only the current repository
+
+ pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5)
+ pager.AddParam(ctx, "q", "Query")
+ pager.AddParam(ctx, "type", "PackageType")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplPackagesList)
+}
diff --git a/routers/web/repo/patch.go b/routers/web/repo/patch.go
new file mode 100644
index 0000000..d234f6c
--- /dev/null
+++ b/routers/web/repo/patch.go
@@ -0,0 +1,124 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/repository/files"
+)
+
+const (
+ tplPatchFile base.TplName = "repo/editor/patch"
+)
+
+// NewDiffPatch renders the create patch page
+func NewDiffPatch(ctx *context.Context) {
+ canCommit := renderCommitRights(ctx)
+
+ ctx.Data["PageIsPatch"] = true
+
+ ctx.Data["commit_summary"] = ""
+ ctx.Data["commit_message"] = ""
+ if canCommit {
+ ctx.Data["commit_choice"] = frmCommitChoiceDirect
+ } else {
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ }
+ ctx.Data["new_branch_name"] = GetUniquePatchBranchName(ctx)
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+
+ ctx.HTML(200, tplPatchFile)
+}
+
+// NewDiffPatchPost response for submitting a patch
+func NewDiffPatchPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditRepoFileForm)
+
+ canCommit := renderCommitRights(ctx)
+ branchName := ctx.Repo.BranchName
+ if form.CommitChoice == frmCommitChoiceNewBranch {
+ branchName = form.NewBranchName
+ }
+ ctx.Data["PageIsPatch"] = true
+ ctx.Data["BranchLink"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["FileContent"] = form.Content
+ ctx.Data["commit_summary"] = form.CommitSummary
+ ctx.Data["commit_message"] = form.CommitMessage
+ ctx.Data["commit_choice"] = form.CommitChoice
+ ctx.Data["new_branch_name"] = form.NewBranchName
+ ctx.Data["last_commit"] = ctx.Repo.CommitID
+ ctx.Data["LineWrapExtensions"] = strings.Join(setting.Repository.Editor.LineWrapExtensions, ",")
+
+ if ctx.HasError() {
+ ctx.HTML(200, tplPatchFile)
+ return
+ }
+
+	// Cannot commit to an existing branch if the user doesn't have rights
+ if branchName == ctx.Repo.BranchName && !canCommit {
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.Data["commit_choice"] = frmCommitChoiceNewBranch
+ ctx.RenderWithErr(ctx.Tr("repo.editor.cannot_commit_to_protected_branch", branchName), tplEditFile, &form)
+ return
+ }
+
+	// CommitSummary is optional in the web form; if it is empty, fall back to the default patch commit message.
+	// `message` will be both the summary and message combined
+ message := strings.TrimSpace(form.CommitSummary)
+ if len(message) == 0 {
+ message = ctx.Locale.TrString("repo.editor.patch")
+ }
+
+ form.CommitMessage = strings.TrimSpace(form.CommitMessage)
+ if len(form.CommitMessage) > 0 {
+ message += "\n\n" + form.CommitMessage
+ }
+
+	gitIdentity := getGitIdentity(ctx, form.CommitMailID, tplPatchFile, &form)
+ if ctx.Written() {
+ return
+ }
+
+ fileResponse, err := files.ApplyDiffPatch(ctx, ctx.Repo.Repository, ctx.Doer, &files.ApplyDiffPatchOptions{
+ LastCommitID: form.LastCommit,
+ OldBranch: ctx.Repo.BranchName,
+ NewBranch: branchName,
+ Message: message,
+ Content: strings.ReplaceAll(form.Content, "\r", ""),
+		Author:       gitIdentity,
+		Committer:    gitIdentity,
+ })
+ if err != nil {
+ if git_model.IsErrBranchAlreadyExists(err) {
+ // User has specified a branch that already exists
+ branchErr := err.(git_model.ErrBranchAlreadyExists)
+ ctx.Data["Err_NewBranchName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.editor.branch_already_exists", branchErr.BranchName), tplEditFile, &form)
+ return
+ } else if models.IsErrCommitIDDoesNotMatch(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.editor.file_changed_while_editing", ctx.Repo.RepoLink+"/compare/"+form.LastCommit+"..."+ctx.Repo.CommitID), tplPatchFile, &form)
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.editor.fail_to_apply_patch", err), tplPatchFile, &form)
+ return
+ }
+
+ if form.CommitChoice == frmCommitChoiceNewBranch && ctx.Repo.Repository.UnitEnabled(ctx, unit.TypePullRequests) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ctx.Repo.BranchName) + "..." + util.PathEscapeSegments(form.NewBranchName))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/commit/" + fileResponse.Commit.SHA)
+ }
+}
diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go
new file mode 100644
index 0000000..878b7ee
--- /dev/null
+++ b/routers/web/repo/projects.go
@@ -0,0 +1,670 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/perm"
+ project_model "code.gitea.io/gitea/models/project"
+ attachment_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplProjects base.TplName = "repo/projects/list"
+ tplProjectsNew base.TplName = "repo/projects/new"
+ tplProjectsView base.TplName = "repo/projects/view"
+)
+
+// MustEnableProjects checks if projects are enabled in settings
+func MustEnableProjects(ctx *context.Context) {
+ if unit.TypeProjects.UnitGlobalDisabled() {
+ ctx.NotFound("EnableRepoProjects", nil)
+ return
+ }
+
+ if ctx.Repo.Repository != nil {
+ if !ctx.Repo.CanRead(unit.TypeProjects) {
+ ctx.NotFound("MustEnableProjects", nil)
+ return
+ }
+ }
+}
+
+// Projects renders the home page of projects
+func Projects(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects")
+
+ sortType := ctx.FormTrim("sort")
+
+ isShowClosed := strings.ToLower(ctx.FormTrim("state")) == "closed"
+ keyword := ctx.FormTrim("q")
+ repo := ctx.Repo.Repository
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ ctx.Data["OpenCount"] = repo.NumOpenProjects
+ ctx.Data["ClosedCount"] = repo.NumClosedProjects
+
+ var total int
+ if !isShowClosed {
+ total = repo.NumOpenProjects
+ } else {
+ total = repo.NumClosedProjects
+ }
+
+ projects, count, err := db.FindAndCount[project_model.Project](ctx, project_model.SearchOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.IssuePagingNum,
+ Page: page,
+ },
+ RepoID: repo.ID,
+ IsClosed: optional.Some(isShowClosed),
+ OrderBy: project_model.GetSearchOrderByBySortType(sortType),
+ Type: project_model.TypeRepository,
+ Title: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("GetProjects", err)
+ return
+ }
+
+ for i := range projects {
+ projects[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, projects[i].Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ }
+
+ ctx.Data["Projects"] = projects
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ numPages := 0
+ if count > 0 {
+		numPages = (int(count) - 1) / setting.UI.IssuePagingNum
+ }
+
+ pager := context.NewPagination(total, setting.UI.IssuePagingNum, page, numPages)
+ pager.AddParam(ctx, "state", "State")
+ ctx.Data["Page"] = pager
+
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["IsShowClosed"] = isShowClosed
+ ctx.Data["IsProjectsPage"] = true
+ ctx.Data["SortType"] = sortType
+
+ ctx.HTML(http.StatusOK, tplProjects)
+}
+
+// RenderNewProject renders the project creation page
+func RenderNewProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+ ctx.Data["TemplateConfigs"] = project_model.GetTemplateConfigs()
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CancelLink"] = ctx.Repo.Repository.Link() + "/projects"
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// NewProjectPost creates a new project
+func NewProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ ctx.Data["Title"] = ctx.Tr("repo.projects.new")
+
+ if ctx.HasError() {
+ RenderNewProject(ctx)
+ return
+ }
+
+ if err := project_model.NewProject(ctx, &project_model.Project{
+ RepoID: ctx.Repo.Repository.ID,
+ Title: form.Title,
+ Description: form.Content,
+ CreatorID: ctx.Doer.ID,
+ TemplateType: form.TemplateType,
+ CardType: form.CardType,
+ Type: project_model.TypeRepository,
+ }); err != nil {
+ ctx.ServerError("NewProject", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.create_success", form.Title))
+ ctx.Redirect(ctx.Repo.RepoLink + "/projects")
+}
+
+// ChangeProjectStatus updates the status of a project between "open" and "close"
+func ChangeProjectStatus(ctx *context.Context) {
+ var toClose bool
+ switch ctx.Params(":action") {
+ case "open":
+ toClose = false
+ case "close":
+ toClose = true
+ default:
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects")
+ return
+ }
+ id := ctx.ParamsInt64(":id")
+
+ if err := project_model.ChangeProjectStatusByRepoIDAndID(ctx, ctx.Repo.Repository.ID, id, toClose); err != nil {
+ ctx.NotFoundOrServerError("ChangeProjectStatusByRepoIDAndID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ ctx.JSONRedirect(fmt.Sprintf("%s/projects/%d", ctx.Repo.RepoLink, id))
+}
+
+// DeleteProject deletes a project
+func DeleteProject(ctx *context.Context) {
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ if err := project_model.DeleteProjectByID(ctx, p.ID); err != nil {
+ ctx.Flash.Error("DeleteProjectByID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.projects.deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/projects")
+}
+
+// RenderEditProject allows a project to be edited
+func RenderEditProject(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+
+ p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ctx.Data["projectID"] = p.ID
+ ctx.Data["title"] = p.Title
+ ctx.Data["content"] = p.Description
+ ctx.Data["card_type"] = p.CardType
+ ctx.Data["redirect"] = ctx.FormString("redirect")
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/projects/%d", ctx.Repo.Repository.Link(), p.ID)
+
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+}
+
+// EditProjectPost response for editing a project
+func EditProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateProjectForm)
+ projectID := ctx.ParamsInt64(":id")
+
+ ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
+ ctx.Data["PageIsEditProjects"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CardTypes"] = project_model.GetCardConfig()
+ ctx.Data["CancelLink"] = fmt.Sprintf("%s/projects/%d", ctx.Repo.Repository.Link(), projectID)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplProjectsNew)
+ return
+ }
+
+ p, err := project_model.GetProjectByID(ctx, projectID)
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if p.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ p.Title = form.Title
+ p.Description = form.Content
+ p.CardType = form.CardType
+ if err = project_model.UpdateProject(ctx, p); err != nil {
+ ctx.ServerError("UpdateProjects", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.projects.edit_success", p.Title))
+ if ctx.FormString("redirect") == "project" {
+ ctx.Redirect(p.Link(ctx))
+ } else {
+ ctx.Redirect(ctx.Repo.RepoLink + "/projects")
+ }
+}
+
+// ViewProject renders the board view of a project
+func ViewProject(ctx *context.Context) {
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ columns, err := project.GetColumns(ctx)
+ if err != nil {
+ ctx.ServerError("GetProjectColumns", err)
+ return
+ }
+
+ issuesMap, err := issues_model.LoadIssuesFromColumnList(ctx, columns)
+ if err != nil {
+ ctx.ServerError("LoadIssuesOfColumns", err)
+ return
+ }
+
+ if project.CardType != project_model.CardTypeTextOnly {
+ issuesAttachmentMap := make(map[int64][]*attachment_model.Attachment)
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ if issueAttachment, err := attachment_model.GetAttachmentsByIssueIDImagesLatest(ctx, issue.ID); err == nil {
+ issuesAttachmentMap[issue.ID] = issueAttachment
+ }
+ }
+ }
+ ctx.Data["issuesAttachmentMap"] = issuesAttachmentMap
+ }
+
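+ // Collect the pull requests referenced from each issue's comments so linked PRs can be shown on the board.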
+ linkedPrsMap := make(map[int64][]*issues_model.Issue)
+ for _, issuesList := range issuesMap {
+ for _, issue := range issuesList {
+ var referencedIDs []int64
+ for _, comment := range issue.Comments {
+ if comment.RefIssueID != 0 && comment.RefIsPull {
+ referencedIDs = append(referencedIDs, comment.RefIssueID)
+ }
+ }
+
+ if len(referencedIDs) > 0 {
+ if linkedPrs, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ IssueIDs: referencedIDs,
+ IsPull: optional.Some(true),
+ }); err == nil {
+ linkedPrsMap[issue.ID] = linkedPrs
+ }
+ }
+ }
+ }
+ ctx.Data["LinkedPRs"] = linkedPrsMap
+
+ project.RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, project.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ ctx.Data["IsProjectsPage"] = true
+ ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["Project"] = project
+ ctx.Data["IssuesMap"] = issuesMap
+ ctx.Data["Columns"] = columns
+
+ ctx.HTML(http.StatusOK, tplProjectsView)
+}
+
+// UpdateIssueProject changes an issue's project
+func UpdateIssueProject(ctx *context.Context) {
+ issues := getActionIssues(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := issues.LoadProjects(ctx); err != nil {
+ ctx.ServerError("LoadProjects", err)
+ return
+ }
+ if _, err := issues.LoadRepositories(ctx); err != nil {
+ ctx.ServerError("LoadProjects", err)
+ return
+ }
+
+ projectID := ctx.FormInt64("id")
+ for _, issue := range issues {
+ if issue.Project != nil && issue.Project.ID == projectID {
+ continue
+ }
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, issue, ctx.Doer, projectID, 0); err != nil {
+ if errors.Is(err, util.ErrPermissionDenied) {
+ continue
+ }
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+
+ ctx.JSONOK()
+}
+
+// DeleteProjectColumn allows for the deletion of a project column
+func DeleteProjectColumn(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+
+ pb, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return
+ }
+ if pb.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", pb.ID, project.ID),
+ })
+ return
+ }
+
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", pb.ID, ctx.Repo.Repository.ID),
+ })
+ return
+ }
+
+ if err := project_model.DeleteColumnByID(ctx, ctx.ParamsInt64(":columnID")); err != nil {
+ ctx.ServerError("DeleteProjectColumnByID", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// AddColumnToProjectPost allows a new column to be added to a project.
+func AddColumnToProjectPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectForRepoByID(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+
+ if err := project_model.NewColumn(ctx, &project_model.Column{
+ ProjectID: project.ID,
+ Title: form.Title,
+ Color: form.Color,
+ CreatorID: ctx.Doer.ID,
+ }); err != nil {
+ ctx.ServerError("NewProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
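+// checkProjectColumnChangePermissions verifies that the doer may change the given project column and that the
+// column belongs to the requested project and repository; it writes an error response and returns nil, nil otherwise.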
+func checkProjectColumnChangePermissions(ctx *context.Context) (*project_model.Project, *project_model.Column) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return nil, nil
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return nil, nil
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return nil, nil
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ ctx.ServerError("GetProjectColumn", err)
+ return nil, nil
+ }
+ if column.ProjectID != ctx.ParamsInt64(":id") {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Project[%d] as expected", column.ID, project.ID),
+ })
+ return nil, nil
+ }
+
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]string{
+ "message": fmt.Sprintf("ProjectColumn[%d] is not in Repository[%d] as expected", column.ID, ctx.Repo.Repository.ID),
+ })
+ return nil, nil
+ }
+ return project, column
+}
+
+// EditProjectColumn updates a project column
+func EditProjectColumn(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditProjectColumnForm)
+ _, column := checkProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if form.Title != "" {
+ column.Title = form.Title
+ }
+ column.Color = form.Color
+ if form.Sorting != 0 {
+ column.Sorting = form.Sorting
+ }
+
+ if err := project_model.UpdateColumn(ctx, column); err != nil {
+ ctx.ServerError("UpdateProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// SetDefaultProjectColumn sets the default column for uncategorized issues/pulls
+func SetDefaultProjectColumn(ctx *context.Context) {
+ project, column := checkProjectColumnChangePermissions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := project_model.SetDefaultColumn(ctx, project.ID, column.ID); err != nil {
+ ctx.ServerError("SetDefaultColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
+
+// MoveIssues moves or keeps issues in a column and sorts them inside that column
+func MoveIssues(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only signed in users are allowed to perform this action.",
+ })
+ return
+ }
+
+ if !ctx.Repo.IsOwner() && !ctx.Repo.IsAdmin() && !ctx.Repo.CanAccess(perm.AccessModeWrite, unit.TypeProjects) {
+ ctx.JSON(http.StatusForbidden, map[string]string{
+ "message": "Only authorized users are allowed to perform this action.",
+ })
+ return
+ }
+
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ if project_model.IsErrProjectNotExist(err) {
+ ctx.NotFound("ProjectNotExist", nil)
+ } else {
+ ctx.ServerError("GetProjectByID", err)
+ }
+ return
+ }
+ if project.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("InvalidRepoID", nil)
+ return
+ }
+
+ column, err := project_model.GetColumn(ctx, ctx.ParamsInt64(":columnID"))
+ if err != nil {
+ if project_model.IsErrProjectColumnNotExist(err) {
+ ctx.NotFound("ProjectColumnNotExist", nil)
+ } else {
+ ctx.ServerError("GetProjectColumn", err)
+ }
+ return
+ }
+
+ if column.ProjectID != project.ID {
+ ctx.NotFound("ColumnNotInProject", nil)
+ return
+ }
+
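+ // The request body is JSON of the form {"issues": [{"issueID": ..., "sorting": ...}, ...]}.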
+ type movedIssuesForm struct {
+ Issues []struct {
+ IssueID int64 `json:"issueID"`
+ Sorting int64 `json:"sorting"`
+ } `json:"issues"`
+ }
+
+ form := &movedIssuesForm{}
+ if err = json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.ServerError("DecodeMovedIssuesForm", err)
+ }
+
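+ // issueIDs is used to load and validate the issues; sortedIssueIDs maps the target position in the column to the issue ID.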
+ issueIDs := make([]int64, 0, len(form.Issues))
+ sortedIssueIDs := make(map[int64]int64)
+ for _, issue := range form.Issues {
+ issueIDs = append(issueIDs, issue.IssueID)
+ sortedIssueIDs[issue.Sorting] = issue.IssueID
+ }
+ movedIssues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("IssueNotExisting", nil)
+ } else {
+ ctx.ServerError("GetIssueByID", err)
+ }
+ return
+ }
+
+ if len(movedIssues) != len(form.Issues) {
+ ctx.ServerError("some issues do not exist", errors.New("some issues do not exist"))
+ return
+ }
+
+ for _, issue := range movedIssues {
+ if issue.RepoID != project.RepoID {
+ ctx.ServerError("Some issue's repoID is not equal to project's repoID", errors.New("Some issue's repoID is not equal to project's repoID"))
+ return
+ }
+ }
+
+ if err = project_model.MoveIssuesOnProjectColumn(ctx, column, sortedIssueIDs); err != nil {
+ ctx.ServerError("MoveIssuesOnProjectColumn", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/repo/projects_test.go b/routers/web/repo/projects_test.go
new file mode 100644
index 0000000..d61230a
--- /dev/null
+++ b/routers/web/repo/projects_test.go
@@ -0,0 +1,27 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestCheckProjectColumnChangePermissions(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/projects/1/2")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ ctx.SetParams(":id", "1")
+ ctx.SetParams(":columnID", "2")
+
+ project, column := checkProjectColumnChangePermissions(ctx)
+ assert.NotNil(t, project)
+ assert.NotNil(t, column)
+ assert.False(t, ctx.Written())
+}
diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go
new file mode 100644
index 0000000..bc85012
--- /dev/null
+++ b/routers/web/repo/pull.go
@@ -0,0 +1,1838 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "html"
+ "net/http"
+ "net/url"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ pull_model "code.gitea.io/gitea/models/pull"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/emoji"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ issue_template "code.gitea.io/gitea/modules/issue/template"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/utils"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/automerge"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/gitdiff"
+ notify_service "code.gitea.io/gitea/services/notify"
+ pull_service "code.gitea.io/gitea/services/pull"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/gobwas/glob"
+)
+
+const (
+ tplFork base.TplName = "repo/pulls/fork"
+ tplCompareDiff base.TplName = "repo/diff/compare"
+ tplPullCommits base.TplName = "repo/pulls/commits"
+ tplPullFiles base.TplName = "repo/pulls/files"
+
+ pullRequestTemplateKey = "PullRequestTemplate"
+)
+
+var pullRequestTemplateCandidates = []string{
+ "PULL_REQUEST_TEMPLATE.md",
+ "PULL_REQUEST_TEMPLATE.yaml",
+ "PULL_REQUEST_TEMPLATE.yml",
+ "pull_request_template.md",
+ "pull_request_template.yaml",
+ "pull_request_template.yml",
+ ".forgejo/PULL_REQUEST_TEMPLATE.md",
+ ".forgejo/PULL_REQUEST_TEMPLATE.yaml",
+ ".forgejo/PULL_REQUEST_TEMPLATE.yml",
+ ".forgejo/pull_request_template.md",
+ ".forgejo/pull_request_template.yaml",
+ ".forgejo/pull_request_template.yml",
+ ".gitea/PULL_REQUEST_TEMPLATE.md",
+ ".gitea/PULL_REQUEST_TEMPLATE.yaml",
+ ".gitea/PULL_REQUEST_TEMPLATE.yml",
+ ".gitea/pull_request_template.md",
+ ".gitea/pull_request_template.yaml",
+ ".gitea/pull_request_template.yml",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/PULL_REQUEST_TEMPLATE.yaml",
+ ".github/PULL_REQUEST_TEMPLATE.yml",
+ ".github/pull_request_template.md",
+ ".github/pull_request_template.yaml",
+ ".github/pull_request_template.yml",
+}
+
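+// getRepository loads the repository with the given ID and checks that the doer can read its code;
+// it renders an error response and returns nil otherwise.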
+func getRepository(ctx *context.Context, repoID int64) *repo_model.Repository {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ ctx.NotFound("GetRepositoryByID", nil)
+ } else {
+ ctx.ServerError("GetRepositoryByID", err)
+ }
+ return nil
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return nil
+ }
+
+ if !perm.CanRead(unit.TypeCode) {
+ log.Trace("Permission Denied: User %-v cannot read %-v of repo %-v\n"+
+ "User in repo has Permissions: %-+v",
+ ctx.Doer,
+ unit.TypeCode,
+ ctx.Repo,
+ perm)
+ ctx.NotFound("getRepository", nil)
+ return nil
+ }
+ return repo
+}
+
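+// updateForkRepositoryInContext fills the context data of the fork page (candidate owners and branches)
+// for forkRepo and reports whether rendering can continue.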
+func updateForkRepositoryInContext(ctx *context.Context, forkRepo *repo_model.Repository) bool {
+ if forkRepo == nil {
+ ctx.NotFound("No repository in context", nil)
+ return false
+ }
+
+ if forkRepo.IsEmpty {
+ log.Trace("Empty repository %-v", forkRepo)
+ ctx.NotFound("updateForkRepositoryInContext", nil)
+ return false
+ }
+
+ if err := forkRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return false
+ }
+
+ ctx.Data["repo_name"] = forkRepo.Name
+ ctx.Data["description"] = forkRepo.Description
+ ctx.Data["IsPrivate"] = forkRepo.IsPrivate || forkRepo.Owner.Visibility == structs.VisibleTypePrivate
+ canForkToUser := forkRepo.OwnerID != ctx.Doer.ID && !repo_model.HasForkedRepo(ctx, ctx.Doer.ID, forkRepo.ID)
+
+ ctx.Data["ForkRepo"] = forkRepo
+
+ ownedOrgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetOrgsCanCreateRepoByUserID", err)
+ return false
+ }
+ var orgs []*organization.Organization
+ for _, org := range ownedOrgs {
+ if forkRepo.OwnerID != org.ID && !repo_model.HasForkedRepo(ctx, org.ID, forkRepo.ID) {
+ orgs = append(orgs, org)
+ }
+ }
+
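+ // Walk up the fork chain and drop every candidate owner that already owns a repository in that chain.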
+ traverseParentRepo := forkRepo
+ for {
+ if ctx.Doer.ID == traverseParentRepo.OwnerID {
+ canForkToUser = false
+ } else {
+ for i, org := range orgs {
+ if org.ID == traverseParentRepo.OwnerID {
+ orgs = append(orgs[:i], orgs[i+1:]...)
+ break
+ }
+ }
+ }
+
+ if !traverseParentRepo.IsFork {
+ break
+ }
+ traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return false
+ }
+ }
+
+ ctx.Data["CanForkToUser"] = canForkToUser
+ ctx.Data["Orgs"] = orgs
+
+ if canForkToUser {
+ ctx.Data["ContextUser"] = ctx.Doer
+ } else if len(orgs) > 0 {
+ ctx.Data["ContextUser"] = orgs[0]
+ } else {
+ ctx.Data["CanForkRepo"] = false
+ ctx.RenderWithErr(ctx.Tr("repo.fork_no_valid_owners"), tplFork, nil)
+ return false
+ }
+
+ branches, err := git_model.FindBranchNames(ctx, git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ ListOptions: db.ListOptions{
+ ListAll: true,
+ },
+ IsDeletedBranch: optional.Some(false),
+ // Add it as the first option
+ ExcludeBranchNames: []string{ctx.Repo.Repository.DefaultBranch},
+ })
+ if err != nil {
+ ctx.ServerError("FindBranchNames", err)
+ return false
+ }
+ ctx.Data["Branches"] = append([]string{ctx.Repo.Repository.DefaultBranch}, branches...)
+
+ return true
+}
+
+// ForkByID redirects (with 301 Moved Permanently) to the repository's `/fork` page
+func ForkByID(ctx *context.Context) {
+ ctx.Redirect(ctx.Repo.Repository.Link()+"/fork", http.StatusMovedPermanently)
+}
+
+// Fork renders the repository fork page
+func Fork(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("new_fork")
+
+ if ctx.Doer.CanForkRepo() {
+ ctx.Data["CanForkRepo"] = true
+ } else {
+ maxCreationLimit := ctx.Doer.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.Flash.Error(msg, true)
+ }
+
+ if !updateForkRepositoryInContext(ctx, ctx.Repo.Repository) {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplFork)
+}
+
+// ForkPost handles the POST request for forking a repository
+func ForkPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateRepoForm)
+ ctx.Data["Title"] = ctx.Tr("new_fork")
+ ctx.Data["CanForkRepo"] = true
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+
+ forkRepo := ctx.Repo.Repository
+ if !updateForkRepositoryInContext(ctx, forkRepo) {
+ return
+ }
+
+ ctx.Data["ContextUser"] = ctxUser
+
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplFork)
+ return
+ }
+
+ var err error
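+ // Walk up the fork chain: redirect to an existing fork of the target owner, and refuse to fork a repository the owner already has.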
+ traverseParentRepo := forkRepo
+ for {
+ if ctxUser.ID == traverseParentRepo.OwnerID {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
+ return
+ }
+ repo := repo_model.GetForkedRepo(ctx, ctxUser.ID, traverseParentRepo.ID)
+ if repo != nil {
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
+ return
+ }
+ if !traverseParentRepo.IsFork {
+ break
+ }
+ traverseParentRepo, err = repo_model.GetRepositoryByID(ctx, traverseParentRepo.ForkID)
+ if err != nil {
+ ctx.ServerError("GetRepositoryByID", err)
+ return
+ }
+ }
+
+ // Check if the user is allowed to create repositories in the organization.
+ if ctxUser.IsOrganization() {
+ isAllowedToFork, err := organization.OrgFromUser(ctxUser).CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return
+ } else if !isAllowedToFork {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ repo, err := repo_service.ForkRepositoryAndUpdates(ctx, ctx.Doer, ctxUser, repo_service.ForkRepoOptions{
+ BaseRepo: forkRepo,
+ Name: form.RepoName,
+ Description: form.Description,
+ SingleBranch: form.ForkSingleBranch,
+ })
+ if err != nil {
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := ctxUser.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tplFork, &form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplFork, &form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tplFork, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tplFork, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tplFork, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tplFork, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplFork, &form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplFork, &form)
+ default:
+ ctx.ServerError("ForkPost", err)
+ }
+ return
+ }
+
+ log.Trace("Repository forked[%d]: %s/%s", forkRepo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(ctxUser.HomeLink() + "/" + url.PathEscape(repo.Name))
+}
+
+func getPullInfo(ctx *context.Context) (issue *issues_model.Issue, ok bool) {
+ issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrIssueNotExist(err) {
+ ctx.NotFound("GetIssueByIndex", err)
+ } else {
+ ctx.ServerError("GetIssueByIndex", err)
+ }
+ return nil, false
+ }
+ if err = issue.LoadPoster(ctx); err != nil {
+ ctx.ServerError("LoadPoster", err)
+ return nil, false
+ }
+ if err := issue.LoadRepo(ctx); err != nil {
+ ctx.ServerError("LoadRepo", err)
+ return nil, false
+ }
+ ctx.Data["Title"] = fmt.Sprintf("#%d - %s", issue.Index, emoji.ReplaceAliases(issue.Title))
+ ctx.Data["Issue"] = issue
+
+ if !issue.IsPull {
+ ctx.NotFound("ViewPullCommits", nil)
+ return nil, false
+ }
+
+ if err = issue.LoadPullRequest(ctx); err != nil {
+ ctx.ServerError("LoadPullRequest", err)
+ return nil, false
+ }
+
+ if err = issue.PullRequest.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return nil, false
+ }
+
+ if ctx.IsSigned {
+ // Update issue-user.
+ if err = activities_model.SetIssueReadBy(ctx, issue.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return nil, false
+ }
+ }
+
+ return issue, true
+}
+
+func setMergeTarget(ctx *context.Context, pull *issues_model.PullRequest) {
+ if ctx.Repo.Owner.Name == pull.MustHeadUserName(ctx) {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + ":" + pull.HeadBranch
+ } else {
+ ctx.Data["HeadTarget"] = pull.MustHeadUserName(ctx) + "/" + pull.HeadRepo.Name + ":" + pull.HeadBranch
+ }
+
+ if pull.Flow == issues_model.PullRequestFlowAGit {
+ ctx.Data["MadeUsingAGit"] = true
+ }
+
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["HeadBranchLink"] = pull.GetHeadBranchLink(ctx)
+ ctx.Data["BaseBranchLink"] = pull.GetBaseBranchLink(ctx)
+}
+
+// GetPullDiffStats gets the diff stats of a pull request
+func GetPullDiffStats(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ mergeBaseCommitID := GetMergedBaseCommitID(ctx, issue)
+
+ if mergeBaseCommitID == "" {
+ ctx.NotFound("PullFiles", nil)
+ return
+ }
+
+ headCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+
+ diffOptions := &gitdiff.DiffOptions{
+ BeforeCommitID: mergeBaseCommitID,
+ AfterCommitID: headCommitID,
+ MaxLines: setting.Git.MaxGitDiffLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: setting.Git.MaxGitDiffFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }
+
+ diff, err := gitdiff.GetPullDiffStats(ctx.Repo.GitRepo, diffOptions)
+ if err != nil {
+ ctx.ServerError("GetPullDiffStats", err)
+ return
+ }
+
+ ctx.Data["Diff"] = diff
+}
+
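+// GetMergedBaseCommitID returns the base commit to compare a merged pull request against, trying to
+// recover it from the head ref or the patch file when no merge base was recorded.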
+func GetMergedBaseCommitID(ctx *context.Context, issue *issues_model.Issue) string {
+ pull := issue.PullRequest
+
+ var baseCommit string
+ // Some migrated PRs have no base SHA and lose history; try to recover one
+ if pull.MergeBase == "" {
+ var commitSHA, parentCommit string
+ // If there is a head or a patch file, and it is readable, grab info
+ commitSHA, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ // Head File does not exist, try the patch
+ commitSHA, err = ctx.Repo.GitRepo.ReadPatchCommit(pull.Index)
+ if err == nil {
+ // Recreate pull head in files for next time
+ if err := ctx.Repo.GitRepo.SetReference(pull.GetGitRefName(), commitSHA); err != nil {
+ log.Error("Could not write head file", err)
+ }
+ } else {
+ // There is no history available
+ log.Trace("No history file available for PR %d", pull.Index)
+ }
+ }
+ if commitSHA != "" {
+ // Get immediate parent of the first commit in the patch, grab history back
+ parentCommit, _, err = git.NewCommand(ctx, "rev-list", "-1", "--skip=1").AddDynamicArguments(commitSHA).RunStdString(&git.RunOpts{Dir: ctx.Repo.GitRepo.Path})
+ if err == nil {
+ parentCommit = strings.TrimSpace(parentCommit)
+ }
+ // Special case for Git < 2.25, which doesn't fail on an immediately empty history
+ if err != nil || parentCommit == "" {
+ log.Info("No known parent commit for PR %d, error: %v", pull.Index, err)
+ // bring at least partial history if it can work
+ parentCommit = commitSHA
+ }
+ }
+ baseCommit = parentCommit
+ } else {
+ // Keep an empty history or original commit
+ baseCommit = pull.MergeBase
+ }
+
+ return baseCommit
+}
+
+// PrepareMergedViewPullInfo shows meta information for a merged pull request view page
+func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
+ pull := issue.PullRequest
+
+ setMergeTarget(ctx, pull)
+ ctx.Data["HasMerged"] = true
+
+ baseCommit := GetMergedBaseCommitID(ctx, issue)
+
+ compareInfo, err := ctx.Repo.GitRepo.GetCompareInfo(ctx.Repo.Repository.RepoPath(),
+ baseCommit, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") || strings.Contains(err.Error(), "unknown revision or path not in the working tree") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+
+ if len(compareInfo.Commits) != 0 {
+ sha := compareInfo.Commits[0].ID.String()
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) != 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+ }
+
+ return compareInfo
+}
+
+// PrepareViewPullInfo shows meta information for a pull request preview page
+func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.CompareInfo {
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+
+ repo := ctx.Repo.Repository
+ pull := issue.PullRequest
+
+ if err := pull.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return nil
+ }
+
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return nil
+ }
+
+ setMergeTarget(ctx, pull)
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, repo.ID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return nil
+ }
+ ctx.Data["EnableStatusCheck"] = pb != nil && pb.EnableStatusCheck
+
+ var baseGitRepo *git.Repository
+ if pull.BaseRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
+ baseGitRepo = ctx.Repo.GitRepo
+ } else {
+ baseGitRepo, err = gitrepo.OpenRepository(ctx, pull.BaseRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer baseGitRepo.Close()
+ }
+
+ if !baseGitRepo.IsBranchExist(pull.BaseBranch) {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+
+ sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
+ return nil
+ }
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) > 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
+ pull.MergeBase, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+ return compareInfo
+ }
+
+ var headBranchExist bool
+ var headBranchSha string
+ // HeadRepo may be missing
+ if pull.HeadRepo != nil {
+ headGitRepo, err := gitrepo.OpenRepository(ctx, pull.HeadRepo)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil
+ }
+ defer headGitRepo.Close()
+
+ if pull.Flow == issues_model.PullRequestFlowGithub {
+ headBranchExist = headGitRepo.IsBranchExist(pull.HeadBranch)
+ } else {
+ headBranchExist = git.IsReferenceExist(ctx, baseGitRepo.Path, pull.GetGitRefName())
+ }
+
+ if headBranchExist {
+ if pull.Flow != issues_model.PullRequestFlowGithub {
+ headBranchSha, err = baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ } else {
+ headBranchSha, err = headGitRepo.GetBranchCommitID(pull.HeadBranch)
+ }
+ if err != nil {
+ ctx.ServerError("GetBranchCommitID", err)
+ return nil
+ }
+ }
+ }
+
+ if headBranchExist {
+ var err error
+ ctx.Data["UpdateAllowed"], ctx.Data["UpdateByRebaseAllowed"], err = pull_service.IsUserAllowedToUpdate(ctx, pull, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToUpdate", err)
+ return nil
+ }
+ ctx.Data["GetCommitMessages"] = pull_service.GetSquashMergeCommitMessages(ctx, pull)
+ } else {
+ ctx.Data["GetCommitMessages"] = ""
+ }
+
+ sha, err := baseGitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.Data["IsPullRequestBroken"] = true
+ if pull.IsSameRepo() {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
+ } else {
+ ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
+ }
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+ ctx.ServerError(fmt.Sprintf("GetRefCommitID(%s)", pull.GetGitRefName()), err)
+ return nil
+ }
+
+ commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, repo.ID, sha, db.ListOptionsAll)
+ if err != nil {
+ ctx.ServerError("GetLatestCommitStatus", err)
+ return nil
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
+ }
+
+ if len(commitStatuses) > 0 {
+ ctx.Data["LatestCommitStatuses"] = commitStatuses
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(commitStatuses)
+ }
+
+ if pb != nil && pb.EnableStatusCheck {
+ var missingRequiredChecks []string
+ for _, requiredContext := range pb.StatusCheckContexts {
+ contextFound := false
+ matchesRequiredContext := createRequiredContextMatcher(requiredContext)
+ for _, presentStatus := range commitStatuses {
+ if matchesRequiredContext(presentStatus.Context) {
+ contextFound = true
+ break
+ }
+ }
+
+ if !contextFound {
+ missingRequiredChecks = append(missingRequiredChecks, requiredContext)
+ }
+ }
+ ctx.Data["MissingRequiredChecks"] = missingRequiredChecks
+
+ ctx.Data["is_context_required"] = func(context string) bool {
+ for _, c := range pb.StatusCheckContexts {
+ if c == context {
+ return true
+ }
+ if gp, err := glob.Compile(c); err != nil {
+ // All newly created status_check_contexts are checked to ensure they are valid glob expressions before being stored in the database.
+ // But some old status_check_context created before glob was introduced may be invalid glob expressions.
+ // So log the error here for debugging.
+ log.Error("compile glob %q: %v", c, err)
+ } else if gp.Match(context) {
+ return true
+ }
+ }
+ return false
+ }
+ ctx.Data["RequiredStatusCheckState"] = pull_service.MergeRequiredContextsCommitStatus(commitStatuses, pb.StatusCheckContexts)
+ }
+
+ ctx.Data["HeadBranchMovedOn"] = headBranchSha != sha
+ ctx.Data["HeadBranchCommitID"] = headBranchSha
+ ctx.Data["PullHeadCommitID"] = sha
+
+ if pull.HeadRepo == nil || !headBranchExist || (!pull.Issue.IsClosed && (headBranchSha != sha)) {
+ ctx.Data["IsPullRequestBroken"] = true
+ if pull.IsSameRepo() {
+ ctx.Data["HeadTarget"] = pull.HeadBranch
+ } else if pull.HeadRepo == nil {
+ ctx.Data["HeadTarget"] = ctx.Locale.Tr("repo.pull.deleted_branch", pull.HeadBranch)
+ } else {
+ ctx.Data["HeadTarget"] = pull.HeadRepo.OwnerName + ":" + pull.HeadBranch
+ }
+ }
+
+ compareInfo, err := baseGitRepo.GetCompareInfo(pull.BaseRepo.RepoPath(),
+ git.BranchPrefix+pull.BaseBranch, pull.GetGitRefName(), false, false)
+ if err != nil {
+ if strings.Contains(err.Error(), "fatal: Not a valid object name") {
+ ctx.Data["IsPullRequestBroken"] = true
+ ctx.Data["BaseTarget"] = pull.BaseBranch
+ ctx.Data["NumCommits"] = 0
+ ctx.Data["NumFiles"] = 0
+ return nil
+ }
+
+ ctx.ServerError("GetCompareInfo", err)
+ return nil
+ }
+
+ if compareInfo.HeadCommitID == compareInfo.MergeBase {
+ ctx.Data["IsNothingToCompare"] = true
+ }
+
+ if pull.IsWorkInProgress(ctx) {
+ ctx.Data["IsPullWorkInProgress"] = true
+ ctx.Data["WorkInProgressPrefix"] = pull.GetWorkInProgressPrefix(ctx)
+ }
+
+ if pull.IsFilesConflicted() {
+ ctx.Data["IsPullFilesConflicted"] = true
+ ctx.Data["ConflictedFiles"] = pull.ConflictedFiles
+ }
+
+ ctx.Data["NumCommits"] = len(compareInfo.Commits)
+ ctx.Data["NumFiles"] = compareInfo.NumFiles
+ return compareInfo
+}
+
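+// createRequiredContextMatcher returns a matcher for the required status check context, using glob matching
+// when the context compiles as a glob pattern and exact string comparison otherwise.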
+func createRequiredContextMatcher(requiredContext string) func(string) bool {
+ if gp, err := glob.Compile(requiredContext); err == nil {
+ return func(contextToCheck string) bool {
+ return gp.Match(contextToCheck)
+ }
+ }
+
+ return func(contextToCheck string) bool {
+ return requiredContext == contextToCheck
+ }
+}
+
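+// pullCommitList is the JSON response of GetPullCommits.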
+type pullCommitList struct {
+ Commits []pull_service.CommitInfo `json:"commits"`
+ LastReviewCommitSha string `json:"last_review_commit_sha"`
+ Locale map[string]any `json:"locale"`
+}
+
+// GetPullCommits gets all commits of the given pull request
+func GetPullCommits(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ resp := &pullCommitList{}
+
+ commits, lastReviewCommitSha, err := pull_service.GetPullCommits(ctx, issue)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+
+ // Get the needed locale
+ resp.Locale = map[string]any{
+ "lang": ctx.Locale.Language(),
+ "show_all_commits": ctx.Tr("repo.pulls.show_all_commits"),
+ "stats_num_commits": ctx.TrN(len(commits), "repo.activity.git_stats_commit_1", "repo.activity.git_stats_commit_n", len(commits)),
+ "show_changes_since_your_last_review": ctx.Tr("repo.pulls.show_changes_since_your_last_review"),
+ "select_commit_hold_shift_for_range": ctx.Tr("repo.pulls.select_commit_hold_shift_for_range"),
+ }
+
+ resp.Commits = commits
+ resp.LastReviewCommitSha = lastReviewCommitSha
+
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// ViewPullCommits shows the commits of a pull request
+func ViewPullCommits(ctx *context.Context) {
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullCommits"] = true
+
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var prInfo *git.CompareInfo
+ if pull.HasMerged {
+ prInfo = PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ prInfo = PrepareViewPullInfo(ctx, issue)
+ }
+
+ if ctx.Written() {
+ return
+ } else if prInfo == nil {
+ ctx.NotFound("ViewPullCommits", nil)
+ return
+ }
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+
+ commits := processGitCommits(ctx, prInfo.Commits)
+ ctx.Data["Commits"] = commits
+ ctx.Data["CommitCount"] = len(commits)
+
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+
+ // For PR commits page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+ getBranchData(ctx, issue)
+ ctx.HTML(http.StatusOK, tplPullCommits)
+}
+
+// viewPullFiles renders the pull request changed files list page
+func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommit string, willShowSpecifiedCommitRange, willShowSpecifiedCommit bool) {
+ ctx.Data["PageIsPullList"] = true
+ ctx.Data["PageIsPullFiles"] = true
+
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var (
+ startCommitID string
+ endCommitID string
+ gitRepo = ctx.Repo.GitRepo
+ )
+
+ var prInfo *git.CompareInfo
+ if pull.HasMerged {
+ prInfo = PrepareMergedViewPullInfo(ctx, issue)
+ } else {
+ prInfo = PrepareViewPullInfo(ctx, issue)
+ }
+
+ // Validate the given commit sha to show (if any passed)
+ if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ foundStartCommit := len(specifiedStartCommit) == 0
+ foundEndCommit := len(specifiedEndCommit) == 0
+
+ if !(foundStartCommit && foundEndCommit) {
+ for _, commit := range prInfo.Commits {
+ if commit.ID.String() == specifiedStartCommit {
+ foundStartCommit = true
+ }
+ if commit.ID.String() == specifiedEndCommit {
+ foundEndCommit = true
+ }
+
+ if foundStartCommit && foundEndCommit {
+ break
+ }
+ }
+ }
+
+ if !(foundStartCommit && foundEndCommit) {
+ ctx.NotFound("Given SHA1 not found for this PR", nil)
+ return
+ }
+ }
+
+ if ctx.Written() {
+ return
+ } else if prInfo == nil {
+ ctx.NotFound("ViewPullFiles", nil)
+ return
+ }
+
+ headCommitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+
+ ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
+
+ if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ if len(specifiedEndCommit) > 0 {
+ endCommitID = specifiedEndCommit
+ } else {
+ endCommitID = headCommitID
+ }
+ if len(specifiedStartCommit) > 0 {
+ startCommitID = specifiedStartCommit
+ } else {
+ startCommitID = prInfo.MergeBase
+ }
+ ctx.Data["IsShowingAllCommits"] = false
+ } else {
+ endCommitID = headCommitID
+ startCommitID = prInfo.MergeBase
+ ctx.Data["IsShowingAllCommits"] = true
+ }
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+ ctx.Data["AfterCommitID"] = endCommitID
+ ctx.Data["BeforeCommitID"] = startCommitID
+
+ fileOnly := ctx.FormBool("file-only")
+
+ maxLines, maxFiles := setting.Git.MaxGitDiffLines, setting.Git.MaxGitDiffFiles
+ files := ctx.FormStrings("files")
+ if fileOnly && (len(files) == 2 || len(files) == 1) {
+ maxLines, maxFiles = -1, -1
+ }
+
+ diffOptions := &gitdiff.DiffOptions{
+ AfterCommitID: endCommitID,
+ SkipTo: ctx.FormString("skip-to"),
+ MaxLines: maxLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: maxFiles,
+ WhitespaceBehavior: gitdiff.GetWhitespaceFlag(ctx.Data["WhitespaceBehavior"].(string)),
+ }
+
+ if !willShowSpecifiedCommit {
+ diffOptions.BeforeCommitID = startCommitID
+ }
+
+ var methodWithError string
+ var diff *gitdiff.Diff
+
+ // If we're not signed in, or only a single commit (or a commit range) is shown, load
+ // only the diff without the viewed-files information, since that information is only
+ // tracked for the latest PR diff and for signed-in users.
+ if !ctx.IsSigned || willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ diff, err = gitdiff.GetDiff(ctx, gitRepo, diffOptions, files...)
+ methodWithError = "GetDiff"
+ } else {
+ diff, err = gitdiff.SyncAndGetUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diffOptions, files...)
+ methodWithError = "SyncAndGetUserSpecificDiff"
+ }
+ if err != nil {
+ ctx.ServerError(methodWithError, err)
+ return
+ }
+
+ ctx.PageData["prReview"] = map[string]any{
+ "numberOfFiles": diff.NumFiles,
+ "numberOfViewedFiles": diff.NumViewedFiles,
+ }
+
+ if err = diff.LoadComments(ctx, issue, ctx.Doer, ctx.Data["ShowOutdatedComments"].(bool)); err != nil {
+ ctx.ServerError("LoadComments", err)
+ return
+ }
+
+ for _, file := range diff.Files {
+ for _, section := range file.Sections {
+ for _, line := range section.Lines {
+ for _, comments := range line.Conversations {
+ for _, comment := range comments {
+ if err := comment.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+ }
+ }
+ }
+ }
+ }
+
+ pb, err := git_model.GetFirstMatchProtectedBranchRule(ctx, pull.BaseRepoID, pull.BaseBranch)
+ if err != nil {
+ ctx.ServerError("LoadProtectedBranch", err)
+ return
+ }
+
+ if pb != nil {
+ glob := pb.GetProtectedFilePatterns()
+ if len(glob) != 0 {
+ for _, file := range diff.Files {
+ file.IsProtected = pb.IsProtectedFile(glob, file.Name)
+ }
+ }
+ }
+
+ ctx.Data["Diff"] = diff
+ ctx.Data["DiffNotAvailable"] = diff.NumFiles == 0
+
+ baseCommit, err := ctx.Repo.GitRepo.GetCommit(startCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+ commit, err := gitRepo.GetCommit(endCommitID)
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+
+ // determine if the user viewing the pull request can edit the head branch
+ if ctx.Doer != nil && pull.HeadRepo != nil && !pull.HasMerged {
+ headRepoPerm, err := access_model.GetUserRepoPermission(ctx, pull.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ ctx.Data["HeadBranchIsEditable"] = pull.HeadRepo.CanEnableEditor() && issues_model.CanMaintainerWriteToBranch(ctx, headRepoPerm, pull.HeadBranch, ctx.Doer)
+ ctx.Data["SourceRepoLink"] = pull.HeadRepo.Link()
+ ctx.Data["HeadBranch"] = pull.HeadBranch
+ }
+
+ if ctx.IsSigned && ctx.Doer != nil {
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ }
+
+ setCompareContext(ctx, baseCommit, commit, ctx.Repo.Owner.Name, ctx.Repo.Repository.Name)
+
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ handleTeamMentions(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ currentReview, err := issues_model.GetCurrentReview(ctx, ctx.Doer, issue)
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("GetCurrentReview", err)
+ return
+ }
+ numPendingCodeComments := int64(0)
+ if currentReview != nil {
+ numPendingCodeComments, err = issues_model.CountComments(ctx, &issues_model.FindCommentsOptions{
+ Type: issues_model.CommentTypeCode,
+ ReviewID: currentReview.ID,
+ IssueID: issue.ID,
+ })
+ if err != nil {
+ ctx.ServerError("CountComments", err)
+ return
+ }
+ }
+ ctx.Data["CurrentReview"] = currentReview
+ ctx.Data["PendingCodeCommentNumber"] = numPendingCodeComments
+
+ getBranchData(ctx, issue)
+ ctx.Data["IsIssuePoster"] = ctx.IsSigned && issue.IsPoster(ctx.Doer.ID)
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
+
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ // For files changed page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.HTML(http.StatusOK, tplPullFiles)
+}
+
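+// ViewPullFilesForSingleCommit renders the changed files of a single commit of the pull request.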
+func ViewPullFilesForSingleCommit(ctx *context.Context) {
+ viewPullFiles(ctx, "", ctx.Params("sha"), true, true)
+}
+
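+// ViewPullFilesForRange renders the changed files between two commits of the pull request.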
+func ViewPullFilesForRange(ctx *context.Context) {
+ viewPullFiles(ctx, ctx.Params("shaFrom"), ctx.Params("shaTo"), true, false)
+}
+
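+// ViewPullFilesStartingFromCommit renders the changed files from the merge base up to the given commit.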
+func ViewPullFilesStartingFromCommit(ctx *context.Context) {
+ viewPullFiles(ctx, "", ctx.Params("sha"), true, false)
+}
+
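+// ViewPullFilesForAllCommitsOfPr renders the changed files of all commits of the pull request.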
+func ViewPullFilesForAllCommitsOfPr(ctx *context.Context) {
+ viewPullFiles(ctx, "", "", false, false)
+}
+
+// UpdatePullRequest merges the PR's base branch into its head branch
+func UpdatePullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ if issue.IsClosed {
+ ctx.NotFound("MergePullRequest", nil)
+ return
+ }
+ if issue.PullRequest.HasMerged {
+ ctx.NotFound("MergePullRequest", nil)
+ return
+ }
+
+ rebase := ctx.FormString("style") == "rebase"
+
+ if err := issue.PullRequest.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return
+ }
+ if err := issue.PullRequest.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return
+ }
+
+ allowedUpdateByMerge, allowedUpdateByRebase, err := pull_service.IsUserAllowedToUpdate(ctx, issue.PullRequest, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("IsUserAllowedToMerge", err)
+ return
+ }
+
+ // ToDo: add check if maintainers are allowed to change branch ... (need migration & co)
+ if (!allowedUpdateByMerge && !rebase) || (rebase && !allowedUpdateByRebase) {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed"))
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ // default merge commit message
+ message := fmt.Sprintf("Merge branch '%s' into %s", issue.PullRequest.BaseBranch, issue.PullRequest.HeadBranch)
+
+ if err = pull_service.Update(ctx, issue.PullRequest, ctx.Doer, message, rebase); err != nil {
+ if models.IsErrMergeConflicts(err) {
+ conflictError := err.(models.ErrMergeConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.merge_conflict"),
+ "Summary": ctx.Tr("repo.pulls.merge_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.Redirect(issue.Link())
+ return
+ } else if models.IsErrRebaseConflicts(err) {
+ conflictError := err.(models.ErrRebaseConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
+ "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("UpdatePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.Redirect(issue.Link())
+ return
+ }
+ ctx.Flash.Error(err.Error())
+ ctx.Redirect(issue.Link())
+ return
+ }
+
+ time.Sleep(1 * time.Second)
+
+ ctx.Flash.Success(ctx.Tr("repo.pulls.update_branch_success"))
+ ctx.Redirect(issue.Link())
+}
+
+// MergePullRequest handles the POST request for merging a pull request
+func MergePullRequest(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.MergePullRequestForm)
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ pr := issue.PullRequest
+ pr.Issue = issue
+ pr.Issue.Repo = ctx.Repo.Repository
+
+ manuallyMerged := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged
+
+ mergeCheckType := pull_service.MergeCheckTypeGeneral
+ if form.MergeWhenChecksSucceed {
+ mergeCheckType = pull_service.MergeCheckTypeAuto
+ }
+ if manuallyMerged {
+ mergeCheckType = pull_service.MergeCheckTypeManually
+ }
+
+ // Before merging, check that the pull request is mergeable.
+ if err := pull_service.CheckPullMergeable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, mergeCheckType, form.ForceMerge); err != nil {
+ switch {
+ case errors.Is(err, pull_service.ErrIsClosed):
+ if issue.IsPull {
+ ctx.JSONError(ctx.Tr("repo.pulls.is_closed"))
+ } else {
+ ctx.JSONError(ctx.Tr("repo.issues.closed_title"))
+ }
+ case errors.Is(err, pull_service.ErrUserNotAllowedToMerge):
+ ctx.JSONError(ctx.Tr("repo.pulls.update_not_allowed"))
+ case errors.Is(err, pull_service.ErrHasMerged):
+ ctx.JSONError(ctx.Tr("repo.pulls.has_merged"))
+ case errors.Is(err, pull_service.ErrIsWorkInProgress):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_wip"))
+ case errors.Is(err, pull_service.ErrNotMergeableState):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
+ case models.IsErrDisallowedToMerge(err):
+ ctx.JSONError(ctx.Tr("repo.pulls.no_merge_not_ready"))
+ case asymkey_service.IsErrWontSign(err):
+ ctx.JSONError(err.Error()) // has no translation ...
+ case errors.Is(err, pull_service.ErrDependenciesLeft):
+ ctx.JSONError(ctx.Tr("repo.issues.dependency.pr_close_blocked"))
+ default:
+ ctx.ServerError("WebCheck", err)
+ }
+
+ return
+ }
+
+ // handle manually-merged mark
+ if manuallyMerged {
+ if err := pull_service.MergedManually(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil {
+ switch {
+ case models.IsErrInvalidMergeStyle(err):
+ ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
+ case strings.Contains(err.Error(), "Wrong commit ID"):
+ ctx.JSONError(ctx.Tr("repo.pulls.wrong_commit_id"))
+ default:
+ ctx.ServerError("MergedManually", err)
+ }
+
+ return
+ }
+
+ ctx.JSONRedirect(issue.Link())
+ return
+ }
+
+ message := strings.TrimSpace(form.MergeTitleField)
+ if len(message) == 0 {
+ var err error
+ message, _, err = pull_service.GetDefaultMergeMessage(ctx, ctx.Repo.GitRepo, pr, repo_model.MergeStyle(form.Do))
+ if err != nil {
+ ctx.ServerError("GetDefaultMergeMessage", err)
+ return
+ }
+ }
+
+ form.MergeMessageField = strings.TrimSpace(form.MergeMessageField)
+ if len(form.MergeMessageField) > 0 {
+ message += "\n\n" + form.MergeMessageField
+ }
+
+ if form.MergeWhenChecksSucceed {
+ // delete all scheduled auto merges
+ _ = pull_model.DeleteScheduledAutoMerge(ctx, pr.ID)
+ // schedule auto merge
+ scheduled, err := automerge.ScheduleAutoMerge(ctx, ctx.Doer, pr, repo_model.MergeStyle(form.Do), message)
+ if err != nil {
+ ctx.ServerError("ScheduleAutoMerge", err)
+ return
+ } else if scheduled {
+ // nothing more to do ...
+ ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_newly_scheduled"))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, pr.Index))
+ return
+ }
+ }
+
+ if err := pull_service.Merge(ctx, pr, ctx.Doer, ctx.Repo.GitRepo, repo_model.MergeStyle(form.Do), form.HeadCommitID, message, false); err != nil {
+ if models.IsErrInvalidMergeStyle(err) {
+ ctx.JSONError(ctx.Tr("repo.pulls.invalid_merge_option"))
+ } else if models.IsErrMergeConflicts(err) {
+ conflictError := err.(models.ErrMergeConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.editor.merge_conflict"),
+ "Summary": ctx.Tr("repo.editor.merge_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrRebaseConflicts(err) {
+ conflictError := err.(models.ErrRebaseConflicts)
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.rebase_conflict", utils.SanitizeFlashErrorString(conflictError.CommitSHA)),
+ "Summary": ctx.Tr("repo.pulls.rebase_conflict_summary"),
+ "Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "<br>" + utils.SanitizeFlashErrorString(conflictError.StdOut),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrMergeUnrelatedHistories(err) {
+ log.Debug("MergeUnrelatedHistories error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.unrelated_histories"))
+ ctx.JSONRedirect(issue.Link())
+ } else if git.IsErrPushOutOfDate(err) {
+ log.Debug("MergePushOutOfDate error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.merge_out_of_date"))
+ ctx.JSONRedirect(issue.Link())
+ } else if models.IsErrSHADoesNotMatch(err) {
+ log.Debug("MergeHeadOutOfDate error: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.pulls.head_out_of_date"))
+ ctx.JSONRedirect(issue.Link())
+ } else if git.IsErrPushRejected(err) {
+ log.Debug("MergePushRejected error: %v", err)
+ pushrejErr := err.(*git.ErrPushRejected)
+ message := pushrejErr.Message
+ if len(message) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.push_rejected_no_message"))
+ } else {
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.push_rejected"),
+ "Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
+ })
+ if err != nil {
+ ctx.ServerError("MergePullRequest.HTMLString", err)
+ return
+ }
+ ctx.Flash.Error(flashError)
+ }
+ ctx.JSONRedirect(issue.Link())
+ } else {
+ ctx.ServerError("Merge", err)
+ }
+ return
+ }
+ log.Trace("Pull request merged: %d", pr.ID)
+
+ if err := stopTimerIfAvailable(ctx, ctx.Doer, issue); err != nil {
+ ctx.ServerError("stopTimerIfAvailable", err)
+ return
+ }
+
+ log.Trace("Pull request merged: %d", pr.ID)
+
+ if form.DeleteBranchAfterMerge {
+ // Don't clean up when other PRs use this branch as their head branch
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+ if exist {
+ ctx.JSONRedirect(issue.Link())
+ return
+ }
+
+ var headRepo *git.Repository
+ if ctx.Repo != nil && ctx.Repo.Repository != nil && pr.HeadRepoID == ctx.Repo.Repository.ID && ctx.Repo.GitRepo != nil {
+ headRepo = ctx.Repo.GitRepo
+ } else {
+ headRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
+ return
+ }
+ defer headRepo.Close()
+ }
+ deleteBranch(ctx, pr, headRepo)
+ }
+
+ ctx.JSONRedirect(issue.Link())
+}
+
+// CancelAutoMergePullRequest cancels a pull request's scheduled auto merge
+func CancelAutoMergePullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ if err := automerge.RemoveScheduledAutoMerge(ctx, ctx.Doer, issue.PullRequest); err != nil {
+ if db.IsErrNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("repo.pulls.auto_merge_not_scheduled"))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+ ctx.ServerError("RemoveScheduledAutoMerge", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.pulls.auto_merge_canceled_schedule"))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d", ctx.Repo.RepoLink, issue.Index))
+}
+
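+// stopTimerIfAvailable stops the user's running stopwatch on the issue, if one exists.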
+func stopTimerIfAvailable(ctx *context.Context, user *user_model.User, issue *issues_model.Issue) error {
+ if issues_model.StopwatchExists(ctx, user.ID, issue.ID) {
+ if err := issues_model.CreateOrStopIssueStopwatch(ctx, user, issue); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// CompareAndPullRequestPost handles the POST request for creating a pull request
+func CompareAndPullRequestPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateIssueForm)
+ ctx.Data["Title"] = ctx.Tr("repo.pulls.compare_changes")
+ ctx.Data["PageIsComparePull"] = true
+ ctx.Data["IsDiffCompare"] = true
+ ctx.Data["PullRequestWorkInProgressPrefixes"] = setting.Repository.PullRequest.WorkInProgressPrefixes
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+ ctx.Data["HasIssuesOrPullsWritePermission"] = ctx.Repo.CanWrite(unit.TypePullRequests)
+
+ var (
+ repo = ctx.Repo.Repository
+ attachments []string
+ )
+
+ ci := ParseCompareInfo(ctx)
+ defer func() {
+ if ci != nil && ci.HeadGitRepo != nil {
+ ci.HeadGitRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+
+ labelIDs, assigneeIDs, milestoneID, projectID := ValidateRepoMetas(ctx, *form, true)
+ if ctx.Written() {
+ return
+ }
+
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.JSONError(ctx.Tr("repo.issues.new.title_empty"))
+ return
+ }
+
+ content := form.Content
+ if filename := ctx.Req.Form.Get("template-file"); filename != "" {
+ if template, err := issue_template.UnmarshalFromRepo(ctx.Repo.GitRepo, ctx.Repo.Repository.DefaultBranch, filename); err == nil {
+ content = issue_template.RenderToMarkdown(template, ctx.Req.Form)
+ }
+ }
+
+ pullIssue := &issues_model.Issue{
+ RepoID: repo.ID,
+ Repo: repo,
+ Title: form.Title,
+ PosterID: ctx.Doer.ID,
+ Poster: ctx.Doer,
+ MilestoneID: milestoneID,
+ IsPull: true,
+ Content: content,
+ }
+ pullRequest := &issues_model.PullRequest{
+ HeadRepoID: ci.HeadRepo.ID,
+ BaseRepoID: repo.ID,
+ HeadBranch: ci.HeadBranch,
+ BaseBranch: ci.BaseBranch,
+ HeadRepo: ci.HeadRepo,
+ BaseRepo: repo,
+ MergeBase: ci.CompareInfo.MergeBase,
+ Type: issues_model.PullRequestGitea,
+ AllowMaintainerEdit: form.AllowMaintainerEdit,
+ }
+ // FIXME: check the error for the case where two people send pull requests at almost the same time,
+ // and give a nice error prompt instead of a 500.
+
+ if err := pull_service.NewPullRequest(ctx, repo, pullIssue, labelIDs, attachments, pullRequest, assigneeIDs); err != nil {
+ switch {
+ case errors.Is(err, user_model.ErrBlockedByUser):
+ ctx.JSONError(ctx.Tr("repo.pulls.blocked_by_user"))
+ case repo_model.IsErrUserDoesNotHaveAccessToRepo(err):
+ ctx.Error(http.StatusBadRequest, "UserDoesNotHaveAccessToRepo", err.Error())
+ case git.IsErrPushRejected(err):
+ pushrejErr := err.(*git.ErrPushRejected)
+ message := pushrejErr.Message
+ if len(message) == 0 {
+ ctx.JSONError(ctx.Tr("repo.pulls.push_rejected_no_message"))
+ return
+ }
+ flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
+ "Message": ctx.Tr("repo.pulls.push_rejected"),
+ "Summary": ctx.Tr("repo.pulls.push_rejected_summary"),
+ "Details": utils.SanitizeFlashErrorString(pushrejErr.Message),
+ })
+ if err != nil {
+ ctx.ServerError("CompareAndPullRequest.HTMLString", err)
+ return
+ }
+ ctx.JSONError(flashError)
+ default:
+ // It's an unexpected error.
+ // If it happens, we should add another case to handle it.
+ log.Error("Unexpected error of NewPullRequest: %T %s", err, err)
+ ctx.ServerError("CompareAndPullRequest", err)
+ }
+ ctx.ServerError("NewPullRequest", err)
+ return
+ }
+
+ if projectID > 0 && ctx.Repo.CanWrite(unit.TypeProjects) {
+ if err := issues_model.IssueAssignOrRemoveProject(ctx, pullIssue, ctx.Doer, projectID, 0); err != nil {
+ if !errors.Is(err, util.ErrPermissionDenied) {
+ ctx.ServerError("IssueAssignOrRemoveProject", err)
+ return
+ }
+ }
+ }
+
+ log.Trace("Pull request created: %d/%d", repo.ID, pullIssue.ID)
+ ctx.JSONRedirect(pullIssue.Link())
+}
+
+// CleanUpPullRequest responds to the request to delete the merged branch after the PR has been merged
+func CleanUpPullRequest(ctx *context.Context) {
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+
+ pr := issue.PullRequest
+
+ // Don't clean up PRs that are neither merged nor closed
+ if !pr.HasMerged && !issue.IsClosed {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ // Don't clean up when there are other PRs that use this branch as their head branch.
+ exist, err := issues_model.HasUnmergedPullRequestsByHeadInfo(ctx, pr.HeadRepoID, pr.HeadBranch)
+ if err != nil {
+ ctx.ServerError("HasUnmergedPullRequestsByHeadInfo", err)
+ return
+ }
+ if exist {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ ctx.ServerError("LoadHeadRepo", err)
+ return
+ } else if pr.HeadRepo == nil {
+ // Forked repository has already been deleted
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ } else if err = pr.LoadBaseRepo(ctx); err != nil {
+ ctx.ServerError("LoadBaseRepo", err)
+ return
+ } else if err = pr.HeadRepo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("HeadRepo.LoadOwner", err)
+ return
+ }
+
+ perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ if !perm.CanWrite(unit.TypeCode) {
+ ctx.NotFound("CleanUpPullRequest", nil)
+ return
+ }
+
+ fullBranchName := pr.HeadRepo.Owner.Name + "/" + pr.HeadBranch
+
+ var gitBaseRepo *git.Repository
+
+ // Assume that the base repo is the current context (almost certainly)
+ if ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.BaseRepoID && ctx.Repo.GitRepo != nil {
+ gitBaseRepo = ctx.Repo.GitRepo
+ } else {
+ // If not just open it
+ gitBaseRepo, err = gitrepo.OpenRepository(ctx, pr.BaseRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.BaseRepo.FullName()), err)
+ return
+ }
+ defer gitBaseRepo.Close()
+ }
+
+ // Now assume that the head repo is the same as the base repo (reasonable chance)
+ gitRepo := gitBaseRepo
+ // But if not: is it the same as the context?
+ if pr.BaseRepoID != pr.HeadRepoID && ctx.Repo != nil && ctx.Repo.Repository != nil && ctx.Repo.Repository.ID == pr.HeadRepoID && ctx.Repo.GitRepo != nil {
+ gitRepo = ctx.Repo.GitRepo
+ } else if pr.BaseRepoID != pr.HeadRepoID {
+ // Otherwise just load it up
+ gitRepo, err = gitrepo.OpenRepository(ctx, pr.HeadRepo)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("OpenRepository[%s]", pr.HeadRepo.FullName()), err)
+ return
+ }
+ defer gitRepo.Close()
+ }
+
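+ // From this point on, every return below redirects back to the pull request.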
+ defer func() {
+ ctx.JSONRedirect(issue.Link())
+ }()
+
+ // Check if branch has no new commits
+ headCommitID, err := gitBaseRepo.GetRefCommitID(pr.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+ branchCommitID, err := gitRepo.GetBranchCommitID(pr.HeadBranch)
+ if err != nil {
+ log.Error("GetBranchCommitID: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+ if headCommitID != branchCommitID {
+ ctx.Flash.Error(ctx.Tr("repo.branch.delete_branch_has_new_commits", fullBranchName))
+ return
+ }
+
+ deleteBranch(ctx, pr, gitRepo)
+}
+
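+// deleteBranch retargets open child pull requests, deletes the pull request's head branch and
+// records a branch-deletion comment, flashing success or failure to the user.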
+func deleteBranch(ctx *context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) {
+ fullBranchName := pr.HeadRepo.FullName() + ":" + pr.HeadBranch
+
+ if err := pull_service.RetargetChildrenOnMerge(ctx, ctx.Doer, pr); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ return
+ }
+
+ if err := repo_service.DeleteBranch(ctx, ctx.Doer, pr.HeadRepo, gitRepo, pr.HeadBranch); err != nil {
+ switch {
+ case git.IsErrBranchNotExist(err):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ case errors.Is(err, repo_service.ErrBranchIsDefault):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ case errors.Is(err, git_model.ErrBranchIsProtected):
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ default:
+ log.Error("DeleteBranch: %v", err)
+ ctx.Flash.Error(ctx.Tr("repo.branch.deletion_failed", fullBranchName))
+ }
+ return
+ }
+
+ if err := issues_model.AddDeletePRBranchComment(ctx, ctx.Doer, pr.BaseRepo, pr.IssueID, pr.HeadBranch); err != nil {
+ // Do not fail here as branch has already been deleted
+ log.Error("DeleteBranch: %v", err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.branch.deletion_success", fullBranchName))
+}
+
+// DownloadPullDiff renders a pull's raw diff
+func DownloadPullDiff(ctx *context.Context) {
+ DownloadPullDiffOrPatch(ctx, false)
+}
+
+// DownloadPullPatch renders a pull's raw patch
+func DownloadPullPatch(ctx *context.Context) {
+ DownloadPullDiffOrPatch(ctx, true)
+}
+
+// DownloadPullDiffOrPatch renders a pull's raw diff or patch
+func DownloadPullDiffOrPatch(ctx *context.Context, patch bool) {
+ pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrPullRequestNotExist(err) {
+ ctx.NotFound("GetPullRequestByIndex", err)
+ } else {
+ ctx.ServerError("GetPullRequestByIndex", err)
+ }
+ return
+ }
+
+ binary := ctx.FormBool("binary")
+
+ if err := pull_service.DownloadDiffOrPatch(ctx, pr, ctx, patch, binary); err != nil {
+ ctx.ServerError("DownloadDiffOrPatch", err)
+ return
+ }
+}
+
+// UpdatePullRequestTarget changes the pull request's target branch
+func UpdatePullRequestTarget(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ pr := issue.PullRequest
+ if !issue.IsPull {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ targetBranch := ctx.FormTrim("target_branch")
+ if len(targetBranch) == 0 {
+ ctx.Error(http.StatusNoContent)
+ return
+ }
+
+ if err := pull_service.ChangeTargetBranch(ctx, pr, ctx.Doer, targetBranch); err != nil {
+ if issues_model.IsErrPullRequestAlreadyExists(err) {
+ err := err.(issues_model.ErrPullRequestAlreadyExists)
+
+ RepoRelPath := ctx.Repo.Owner.Name + "/" + ctx.Repo.Repository.Name
+ errorMessage := ctx.Tr("repo.pulls.has_pull_request", html.EscapeString(ctx.Repo.RepoLink+"/pulls/"+strconv.FormatInt(err.IssueID, 10)), html.EscapeString(RepoRelPath), err.IssueID) // FIXME: Creates url inside locale string
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if issues_model.IsErrIssueIsClosed(err) {
+ errorMessage := ctx.Tr("repo.pulls.is_closed")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if models.IsErrPullRequestHasMerged(err) {
+ errorMessage := ctx.Tr("repo.pulls.has_merged")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusConflict, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else if git_model.IsErrBranchesEqual(err) {
+ errorMessage := ctx.Tr("repo.pulls.nothing_to_compare")
+
+ ctx.Flash.Error(errorMessage)
+ ctx.JSON(http.StatusBadRequest, map[string]any{
+ "error": err.Error(),
+ "user_error": errorMessage,
+ })
+ } else {
+ ctx.ServerError("UpdatePullRequestTarget", err)
+ }
+ return
+ }
+ notify_service.PullRequestChangeTargetBranch(ctx, ctx.Doer, pr, targetBranch)
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "base_branch": pr.BaseBranch,
+ })
+}
+
+// SetAllowEdits sets whether maintainers are allowed to edit the pull request
+func SetAllowEdits(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateAllowEditsForm)
+
+ pr, err := issues_model.GetPullRequestByIndex(ctx, ctx.Repo.Repository.ID, ctx.ParamsInt64(":index"))
+ if err != nil {
+ if issues_model.IsErrPullRequestNotExist(err) {
+ ctx.NotFound("GetPullRequestByIndex", err)
+ } else {
+ ctx.ServerError("GetPullRequestByIndex", err)
+ }
+ return
+ }
+
+ if err := pull_service.SetAllowEdits(ctx, ctx.Doer, pr, form.AllowMaintainerEdit); err != nil {
+ if errors.Is(err, pull_service.ErrUserHasNoPermissionForAction) {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("SetAllowEdits", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "allow_maintainer_edit": pr.AllowMaintainerEdit,
+ })
+}
diff --git a/routers/web/repo/pull_review.go b/routers/web/repo/pull_review.go
new file mode 100644
index 0000000..e8a3c48
--- /dev/null
+++ b/routers/web/repo/pull_review.go
@@ -0,0 +1,316 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+
+ issues_model "code.gitea.io/gitea/models/issues"
+ pull_model "code.gitea.io/gitea/models/pull"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+const (
+ tplDiffConversation base.TplName = "repo/diff/conversation"
+ tplTimelineConversation base.TplName = "repo/issue/view_content/conversation"
+ tplNewComment base.TplName = "repo/diff/new_comment"
+)
+
+// RenderNewCodeCommentForm will render the form for creating a new review comment
+func RenderNewCodeCommentForm(ctx *context.Context) {
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+ currentReview, err := issues_model.GetCurrentReview(ctx, ctx.Doer, issue)
+ if err != nil && !issues_model.IsErrReviewNotExist(err) {
+ ctx.ServerError("GetCurrentReview", err)
+ return
+ }
+ ctx.Data["PageIsPullFiles"] = true
+ ctx.Data["Issue"] = issue
+ ctx.Data["CurrentReview"] = currentReview
+ pullHeadCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(issue.PullRequest.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+ ctx.Data["AfterCommitID"] = pullHeadCommitID
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+ ctx.HTML(http.StatusOK, tplNewComment)
+}
+
+// CreateCodeComment will create a code comment, attached to a pending review if required
+func CreateCodeComment(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CodeCommentForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
+ signedLine := form.Line
+ if form.Side == "previous" {
+ signedLine *= -1
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ comment, err := pull_service.CreateCodeComment(ctx,
+ ctx.Doer,
+ ctx.Repo.GitRepo,
+ issue,
+ signedLine,
+ form.Content,
+ form.TreePath,
+ !form.SingleReview,
+ form.Reply,
+ form.LatestCommitID,
+ attachments,
+ )
+ if err != nil {
+ ctx.ServerError("CreateCodeComment", err)
+ return
+ }
+
+ if comment == nil {
+ log.Trace("Comment not created: %-v #%d[%d]", ctx.Repo.Repository, issue.Index, issue.ID)
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
+ log.Trace("Comment created: %-v #%d[%d] Comment[%d]", ctx.Repo.Repository, issue.Index, issue.ID, comment.ID)
+
+ renderConversation(ctx, comment, form.Origin)
+}
+
+// UpdateResolveConversation adds or removes the resolved mark of a conversation
+func UpdateResolveConversation(ctx *context.Context) {
+ origin := ctx.FormString("origin")
+ action := ctx.FormString("action")
+ commentID := ctx.FormInt64("comment_id")
+
+ comment, err := issues_model.GetCommentByID(ctx, commentID)
+ if err != nil {
+ ctx.ServerError("GetIssueByID", err)
+ return
+ }
+
+ if err = comment.LoadIssue(ctx); err != nil {
+ ctx.ServerError("comment.LoadIssue", err)
+ return
+ }
+
+ if comment.Issue.RepoID != ctx.Repo.Repository.ID {
+ ctx.NotFound("comment's repoID is incorrect", errors.New("comment's repoID is incorrect"))
+ return
+ }
+
+ var permResult bool
+ if permResult, err = issues_model.CanMarkConversation(ctx, comment.Issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ if !permResult {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if !comment.Issue.IsPull {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ if action == "Resolve" || action == "UnResolve" {
+ err = issues_model.MarkConversation(ctx, comment, ctx.Doer, action == "Resolve")
+ if err != nil {
+ ctx.ServerError("MarkConversation", err)
+ return
+ }
+ } else {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ renderConversation(ctx, comment, origin)
+}
+
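+// renderConversation renders the code comment conversation that the given comment belongs to,
+// either as a diff conversation or a timeline conversation depending on origin.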
+func renderConversation(ctx *context.Context, comment *issues_model.Comment, origin string) {
+ comments, err := issues_model.FetchCodeConversation(ctx, comment, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("FetchCodeCommentsByLine", err)
+ return
+ }
+ ctx.Data["PageIsPullFiles"] = (origin == "diff")
+
+ if err := comments.LoadAttachments(ctx); err != nil {
+ ctx.ServerError("LoadAttachments", err)
+ return
+ }
+
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "comment")
+
+ ctx.Data["comments"] = comments
+ if ctx.Data["CanMarkConversation"], err = issues_model.CanMarkConversation(ctx, comment.Issue, ctx.Doer); err != nil {
+ ctx.ServerError("CanMarkConversation", err)
+ return
+ }
+ ctx.Data["Issue"] = comment.Issue
+ if err = comment.Issue.LoadPullRequest(ctx); err != nil {
+ ctx.ServerError("comment.Issue.LoadPullRequest", err)
+ return
+ }
+ pullHeadCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(comment.Issue.PullRequest.GetGitRefName())
+ if err != nil {
+ ctx.ServerError("GetRefCommitID", err)
+ return
+ }
+ ctx.Data["AfterCommitID"] = pullHeadCommitID
+ if origin == "diff" {
+ ctx.HTML(http.StatusOK, tplDiffConversation)
+ } else if origin == "timeline" {
+ ctx.HTML(http.StatusOK, tplTimelineConversation)
+ }
+}
+
+// SubmitReview creates a review out of the existing pending review or creates a new one if no pending review exists
+func SubmitReview(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.SubmitReviewForm)
+ issue := GetActionIssue(ctx)
+ if ctx.Written() {
+ return
+ }
+ if !issue.IsPull {
+ return
+ }
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.Data["ErrorMsg"].(string))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+
+ reviewType := form.ReviewType()
+ switch reviewType {
+ case issues_model.ReviewTypeUnknown:
+ ctx.ServerError("ReviewType", fmt.Errorf("unknown ReviewType: %s", form.Type))
+ return
+
+ // can not approve/reject your own PR
+ case issues_model.ReviewTypeApprove, issues_model.ReviewTypeReject:
+ if issue.IsPoster(ctx.Doer.ID) {
+ var translated string
+ if reviewType == issues_model.ReviewTypeApprove {
+ translated = ctx.Locale.TrString("repo.issues.review.self.approval")
+ } else {
+ translated = ctx.Locale.TrString("repo.issues.review.self.rejection")
+ }
+
+ ctx.Flash.Error(translated)
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ return
+ }
+ }
+
+ var attachments []string
+ if setting.Attachment.Enabled {
+ attachments = form.Files
+ }
+
+ _, comm, err := pull_service.SubmitReview(ctx, ctx.Doer, ctx.Repo.GitRepo, issue, reviewType, form.Content, form.CommitID, attachments)
+ if err != nil {
+ if issues_model.IsContentEmptyErr(err) {
+ ctx.Flash.Error(ctx.Tr("repo.issues.review.content.empty"))
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d/files", ctx.Repo.RepoLink, issue.Index))
+ } else {
+ ctx.ServerError("SubmitReview", err)
+ }
+ return
+ }
+ ctx.JSONRedirect(fmt.Sprintf("%s/pulls/%d#%s", ctx.Repo.RepoLink, issue.Index, comm.HashTag()))
+}
+
+// DismissReview dismisses a stale review as a repo admin
+func DismissReview(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.DismissReviewForm)
+ comm, err := pull_service.DismissReview(ctx, form.ReviewID, ctx.Repo.Repository.ID, form.Message, ctx.Doer, true, true)
+ if err != nil {
+ if pull_service.IsErrDismissRequestOnClosedPR(err) {
+ ctx.Status(http.StatusForbidden)
+ return
+ }
+ ctx.ServerError("pull_service.DismissReview", err)
+ return
+ }
+
+ ctx.Redirect(fmt.Sprintf("%s/pulls/%d#%s", ctx.Repo.RepoLink, comm.Issue.Index, comm.HashTag()))
+}
+
+// viewedFilesUpdate is the struct used to parse the body of a request to update the viewed files of a PR.
+// If you want to implement an API to update the review, simply move this struct into modules.
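+// An example body: {"files": {"README.md": true, "src/main.go": false}, "headCommitSHA": "<commit sha>"}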
+type viewedFilesUpdate struct {
+ Files map[string]bool `json:"files"`
+ HeadCommitSHA string `json:"headCommitSHA"`
+}
+
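+// UpdateViewedFiles marks the given files of a pull request as viewed or unviewed for the current user.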
+func UpdateViewedFiles(ctx *context.Context) {
+ // Find corresponding PR
+ issue, ok := getPullInfo(ctx)
+ if !ok {
+ return
+ }
+ pull := issue.PullRequest
+
+ var data *viewedFilesUpdate
+ err := json.NewDecoder(ctx.Req.Body).Decode(&data)
+ if err != nil {
+ log.Warn("Attempted to update a review but could not parse request body: %v", err)
+ ctx.Resp.WriteHeader(http.StatusBadRequest)
+ return
+ }
+
+ // Default to the pull request's current head commit if no head commit SHA was supplied
+ if data.HeadCommitSHA == "" {
+ data.HeadCommitSHA = pull.HeadCommitID
+ }
+
+ updatedFiles := make(map[string]pull_model.ViewedState, len(data.Files))
+ for file, viewed := range data.Files {
+ // Only unviewed and viewed are possible; has-changed cannot be set from the outside
+ state := pull_model.Unviewed
+ if viewed {
+ state = pull_model.Viewed
+ }
+ updatedFiles[file] = state
+ }
+
+ if err := pull_model.UpdateReviewState(ctx, ctx.Doer.ID, pull.ID, data.HeadCommitSHA, updatedFiles); err != nil {
+ ctx.ServerError("UpdateReview", err)
+ }
+}
diff --git a/routers/web/repo/pull_review_test.go b/routers/web/repo/pull_review_test.go
new file mode 100644
index 0000000..329e83f
--- /dev/null
+++ b/routers/web/repo/pull_review_test.go
@@ -0,0 +1,104 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/pull"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestRenderConversation(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ pr, _ := issues_model.GetPullRequestByID(db.DefaultContext, 2)
+ _ = pr.LoadIssue(db.DefaultContext)
+ _ = pr.Issue.LoadPoster(db.DefaultContext)
+ _ = pr.Issue.LoadRepo(db.DefaultContext)
+
+ run := func(name string, cb func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder)) {
+ t.Run(name, func(t *testing.T) {
+ ctx, resp := contexttest.MockContext(t, "/")
+ ctx.Render = templates.HTMLRenderer()
+ contexttest.LoadUser(t, ctx, pr.Issue.PosterID)
+ contexttest.LoadRepo(t, ctx, pr.BaseRepoID)
+ contexttest.LoadGitRepo(t, ctx)
+ defer ctx.Repo.GitRepo.Close()
+ cb(t, ctx, resp)
+ })
+ }
+
+ var preparedComment *issues_model.Comment
+ run("prepare", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ comment, err := pull.CreateCodeComment(ctx, pr.Issue.Poster, ctx.Repo.GitRepo, pr.Issue, 1, "content", "", false, 0, pr.HeadCommitID, nil)
+ require.NoError(t, err)
+
+ comment.Invalidated = true
+ err = issues_model.UpdateCommentInvalidate(ctx, comment)
+ require.NoError(t, err)
+
+ preparedComment = comment
+ })
+ if !assert.NotNil(t, preparedComment) {
+ return
+ }
+ run("diff with outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "diff")
+ assert.Contains(t, resp.Body.String(), `<div class="content comment-container"`)
+ })
+ run("diff without outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = false
+ renderConversation(ctx, preparedComment, "diff")
+ // Unlike Gitea, Forgejo renders the conversation (with the "outdated" label)
+ assert.Contains(t, resp.Body.String(), `repo.issues.review.outdated_description`)
+ })
+ run("timeline with outdated", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Contains(t, resp.Body.String(), `<div id="code-comments-`)
+ })
+ run("timeline is not affected by ShowOutdatedComments=false", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ ctx.Data["ShowOutdatedComments"] = false
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Contains(t, resp.Body.String(), `<div id="code-comments-`)
+ })
+ run("diff non-existing review", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ reviews, err := issues_model.FindReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ IssueID: 2,
+ })
+ require.NoError(t, err)
+ for _, r := range reviews {
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, r))
+ }
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "diff")
+ assert.Equal(t, http.StatusOK, resp.Code)
+ assert.NotContains(t, resp.Body.String(), `status-page-500`)
+ })
+ run("timeline non-existing review", func(t *testing.T, ctx *context.Context, resp *httptest.ResponseRecorder) {
+ reviews, err := issues_model.FindReviews(db.DefaultContext, issues_model.FindReviewOptions{
+ IssueID: 2,
+ })
+ require.NoError(t, err)
+ for _, r := range reviews {
+ require.NoError(t, issues_model.DeleteReview(db.DefaultContext, r))
+ }
+ ctx.Data["ShowOutdatedComments"] = true
+ renderConversation(ctx, preparedComment, "timeline")
+ assert.Equal(t, http.StatusOK, resp.Code)
+ assert.NotContains(t, resp.Body.String(), `status-page-500`)
+ })
+}
diff --git a/routers/web/repo/recent_commits.go b/routers/web/repo/recent_commits.go
new file mode 100644
index 0000000..c158fb3
--- /dev/null
+++ b/routers/web/repo/recent_commits.go
@@ -0,0 +1,41 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/services/context"
+ contributors_service "code.gitea.io/gitea/services/repository"
+)
+
+const (
+ tplRecentCommits base.TplName = "repo/activity"
+)
+
+// RecentCommits renders the page that shows the recent commit frequency of the repository
+func RecentCommits(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.activity.navbar.recent_commits")
+
+ ctx.Data["PageIsActivity"] = true
+ ctx.Data["PageIsRecentCommits"] = true
+ ctx.PageData["repoLink"] = ctx.Repo.RepoLink
+
+ ctx.HTML(http.StatusOK, tplRecentCommits)
+}
+
+// RecentCommitsData returns JSON of recent commits data
+func RecentCommitsData(ctx *context.Context) {
+ if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
+ if errors.Is(err, contributors_service.ErrAwaitGeneration) {
+ ctx.Status(http.StatusAccepted)
+ return
+ }
+ ctx.ServerError("RecentCommitsData", err)
+ } else {
+ ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
+ }
+}
diff --git a/routers/web/repo/release.go b/routers/web/repo/release.go
new file mode 100644
index 0000000..2266deb
--- /dev/null
+++ b/routers/web/repo/release.go
@@ -0,0 +1,857 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/context/upload"
+ "code.gitea.io/gitea/services/forms"
+ releaseservice "code.gitea.io/gitea/services/release"
+)
+
+const (
+ tplReleasesList base.TplName = "repo/release/list"
+ tplReleaseNew base.TplName = "repo/release/new"
+ tplTagsList base.TplName = "repo/tag/list"
+)
+
+// calReleaseNumCommitsBehind calculates how many commits the given release is behind its target branch.
+func calReleaseNumCommitsBehind(repoCtx *context.Repository, release *repo_model.Release, countCache map[string]int64) error {
+ target := release.Target
+ if target == "" {
+ target = repoCtx.Repository.DefaultBranch
+ }
+ // Get count if not cached
+ if _, ok := countCache[target]; !ok {
+ commit, err := repoCtx.GitRepo.GetBranchCommit(target)
+ if err != nil {
+ var errNotExist git.ErrNotExist
+ if target == repoCtx.Repository.DefaultBranch || !errors.As(err, &errNotExist) {
+ return fmt.Errorf("GetBranchCommit: %w", err)
+ }
+ // fallback to default branch
+ target = repoCtx.Repository.DefaultBranch
+ commit, err = repoCtx.GitRepo.GetBranchCommit(target)
+ if err != nil {
+ return fmt.Errorf("GetBranchCommit(DefaultBranch): %w", err)
+ }
+ }
+ countCache[target], err = commit.CommitsCount()
+ if err != nil {
+ return fmt.Errorf("CommitsCount: %w", err)
+ }
+ }
+ release.NumCommitsBehind = countCache[target] - release.NumCommits
+ release.TargetBehind = target
+ return nil
+}
+
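+// ReleaseInfo wraps a release together with the combined and individual commit statuses of its tagged commit.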
+type ReleaseInfo struct {
+ Release *repo_model.Release
+ CommitStatus *git_model.CommitStatus
+ CommitStatuses []*git_model.CommitStatus
+}
+
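+// getReleaseInfos loads the releases matching opts, resolves their publishers, renders their notes and,
+// when the doer can read Actions, attaches the latest commit statuses of the released commits.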
+func getReleaseInfos(ctx *context.Context, opts *repo_model.FindReleasesOptions) ([]*ReleaseInfo, error) {
+ releases, err := db.Find[repo_model.Release](ctx, opts)
+ if err != nil {
+ return nil, err
+ }
+
+ for _, release := range releases {
+ release.Repo = ctx.Repo.Repository
+ }
+
+ if err = repo_model.GetReleaseAttachments(ctx, releases...); err != nil {
+ return nil, err
+ }
+
+ // Temporarily cache the commit counts of used branches to speed things up.
+ countCache := make(map[string]int64)
+ cacheUsers := make(map[int64]*user_model.User)
+ if ctx.Doer != nil {
+ cacheUsers[ctx.Doer.ID] = ctx.Doer
+ }
+ var ok bool
+
+ canReadActions := ctx.Repo.CanRead(unit.TypeActions)
+
+ releaseInfos := make([]*ReleaseInfo, 0, len(releases))
+ for _, r := range releases {
+ if r.Publisher, ok = cacheUsers[r.PublisherID]; !ok {
+ r.Publisher, err = user_model.GetUserByID(ctx, r.PublisherID)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ r.Publisher = user_model.NewGhostUser()
+ } else {
+ return nil, err
+ }
+ }
+ cacheUsers[r.PublisherID] = r.Publisher
+ }
+
+ r.RenderedNote, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, r.Note)
+ if err != nil {
+ return nil, err
+ }
+
+ err = r.LoadArchiveDownloadCount(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ if !r.IsDraft {
+ if err := calReleaseNumCommitsBehind(ctx.Repo, r, countCache); err != nil {
+ return nil, err
+ }
+ }
+
+ info := &ReleaseInfo{
+ Release: r,
+ }
+
+ if canReadActions {
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, r.Repo.ID, r.Sha1, db.ListOptionsAll)
+ if err != nil {
+ return nil, err
+ }
+
+ info.CommitStatus = git_model.CalcCommitStatus(statuses)
+ info.CommitStatuses = statuses
+ }
+
+ releaseInfos = append(releaseInfos, info)
+ }
+
+ return releaseInfos, nil
+}
+
+// Releases renders the releases list page
+func Releases(ctx *context.Context) {
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.release.releases")
+ ctx.Data["IsViewBranch"] = false
+ ctx.Data["IsViewTag"] = true
+ // Disable the showCreateNewBranch form in the dropdown on this page.
+ ctx.Data["CanCreateBranch"] = false
+ ctx.Data["HideBranchesInDropdown"] = true
+
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: ctx.FormInt("limit"),
+ }
+ if listOptions.PageSize == 0 {
+ listOptions.PageSize = setting.Repository.Release.DefaultPagingNum
+ }
+ if listOptions.PageSize > setting.API.MaxResponseItems {
+ listOptions.PageSize = setting.API.MaxResponseItems
+ }
+
+ writeAccess := ctx.Repo.CanWrite(unit.TypeReleases)
+ ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
+
+ releases, err := getReleaseInfos(ctx, &repo_model.FindReleasesOptions{
+ ListOptions: listOptions,
+ // Only show draft releases to users who can write; read-only users shouldn't see draft releases.
+ IncludeDrafts: writeAccess,
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("getReleaseInfos", err)
+ return
+ }
+ for _, rel := range releases {
+ if rel.Release.IsTag && rel.Release.Title == "" {
+ rel.Release.Title = rel.Release.TagName
+ }
+ }
+
+ ctx.Data["Releases"] = releases
+ addVerifyTagToContext(ctx)
+
+ numReleases := ctx.Data["NumReleases"].(int64)
+ pager := context.NewPagination(int(numReleases), listOptions.PageSize, listOptions.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplReleasesList)
+}
+
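+// verifyTagSignature returns the signature verification of the release's tag, or nil if the tag is not signed.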
+func verifyTagSignature(ctx *context.Context, r *repo_model.Release) (*asymkey.ObjectVerification, error) {
+ if err := r.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+ gitRepo, err := gitrepo.OpenRepository(ctx, r.Repo)
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ tag, err := gitRepo.GetTag(r.TagName)
+ if err != nil {
+ return nil, err
+ }
+ if tag.Signature == nil {
+ return nil, nil
+ }
+
+ verification := asymkey.ParseTagWithSignature(ctx, gitRepo, tag)
+ return verification, nil
+}
+
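+// addVerifyTagToContext registers the VerifyTag and HasSignature template helpers used to display tag signature verification.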
+func addVerifyTagToContext(ctx *context.Context) {
+ ctx.Data["VerifyTag"] = func(r *repo_model.Release) *asymkey.ObjectVerification {
+ v, err := verifyTagSignature(ctx, r)
+ if err != nil {
+ return nil
+ }
+ return v
+ }
+ ctx.Data["HasSignature"] = func(verification *asymkey.ObjectVerification) bool {
+ if verification == nil {
+ return false
+ }
+ return verification.Reason != "gpg.error.not_signed_commit"
+ }
+}
+
+// TagsList renders the tags list page
+func TagsList(ctx *context.Context) {
+ ctx.Data["PageIsTagList"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.release.tags")
+ ctx.Data["IsViewBranch"] = false
+ ctx.Data["IsViewTag"] = true
+ // Disable the showCreateNewBranch form in the dropdown on this page.
+ ctx.Data["CanCreateBranch"] = false
+ ctx.Data["HideBranchesInDropdown"] = true
+ ctx.Data["CanCreateRelease"] = ctx.Repo.CanWrite(unit.TypeReleases) && !ctx.Repo.Repository.IsArchived
+
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: ctx.FormInt("limit"),
+ }
+ if listOptions.PageSize == 0 {
+ listOptions.PageSize = setting.Repository.Release.DefaultPagingNum
+ }
+ if listOptions.PageSize > setting.API.MaxResponseItems {
+ listOptions.PageSize = setting.API.MaxResponseItems
+ }
+
+ opts := repo_model.FindReleasesOptions{
+ ListOptions: listOptions,
+ // For the tags list page, show all releases with real tags (having a real commit ID);
+ // drafts should also be included because a real tag might be used as a draft.
+ IncludeDrafts: true,
+ IncludeTags: true,
+ HasSha1: optional.Some(true),
+ RepoID: ctx.Repo.Repository.ID,
+ }
+
+ releases, err := db.Find[repo_model.Release](ctx, opts)
+ if err != nil {
+ ctx.ServerError("GetReleasesByRepoID", err)
+ return
+ }
+
+ ctx.Data["Releases"] = releases
+ addVerifyTagToContext(ctx)
+
+ numTags := ctx.Data["NumTags"].(int64)
+ pager := context.NewPagination(int(numTags), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+
+ ctx.Data["PageIsViewCode"] = !ctx.Repo.Repository.UnitEnabled(ctx, unit.TypeReleases)
+ ctx.HTML(http.StatusOK, tplTagsList)
+}
+
+// ReleasesFeedRSS gets the feed for releases in RSS format
+func ReleasesFeedRSS(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, true, "rss")
+}
+
+// TagsListFeedRSS gets the feed for tags in RSS format
+func TagsListFeedRSS(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, false, "rss")
+}
+
+// ReleasesFeedAtom gets the feed for releases in Atom format
+func ReleasesFeedAtom(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, true, "atom")
+}
+
+// TagsListFeedAtom gets the feed for tags in Atom format
+func TagsListFeedAtom(ctx *context.Context) {
+ releasesOrTagsFeed(ctx, false, "atom")
+}
+
+func releasesOrTagsFeed(ctx *context.Context, isReleasesOnly bool, formatType string) {
+ feed.ShowReleaseFeed(ctx, ctx.Repo.Repository, isReleasesOnly, formatType)
+}
+
+// SingleRelease renders a single release's page
+func SingleRelease(ctx *context.Context) {
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["DefaultBranch"] = ctx.Repo.Repository.DefaultBranch
+
+ writeAccess := ctx.Repo.CanWrite(unit.TypeReleases)
+ ctx.Data["CanCreateRelease"] = writeAccess && !ctx.Repo.Repository.IsArchived
+
+ releases, err := getReleaseInfos(ctx, &repo_model.FindReleasesOptions{
+ ListOptions: db.ListOptions{Page: 1, PageSize: 1},
+ RepoID: ctx.Repo.Repository.ID,
+ // Include tags in the search too.
+ IncludeTags: true,
+ TagNames: []string{ctx.Params("*")},
+ // Only show draft releases to users who can write; read-only users shouldn't see draft releases.
+ IncludeDrafts: writeAccess,
+ })
+ if err != nil {
+ ctx.ServerError("getReleaseInfos", err)
+ return
+ }
+ if len(releases) != 1 {
+ ctx.NotFound("SingleRelease", err)
+ return
+ }
+
+ release := releases[0].Release
+ if release.IsTag && release.Title == "" {
+ release.Title = release.TagName
+ }
+ addVerifyTagToContext(ctx)
+
+ ctx.Data["PageIsSingleTag"] = release.IsTag
+ if release.IsTag {
+ ctx.Data["Title"] = release.TagName
+ } else {
+ ctx.Data["Title"] = release.Title
+ }
+
+ err = release.LoadArchiveDownloadCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadArchiveDownloadCount", err)
+ return
+ }
+
+ ctx.Data["Releases"] = releases
+ ctx.HTML(http.StatusOK, tplReleasesList)
+}
+
+// LatestRelease redirects to the latest release
+func LatestRelease(ctx *context.Context) {
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("LatestRelease", err)
+ return
+ }
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ return
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ ctx.Redirect(release.Link())
+}
+
+// NewRelease renders the create or edit release page
+func NewRelease(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.release.new_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["tag_target"] = ctx.Repo.Repository.DefaultBranch
+ if tagName := ctx.FormString("tag"); len(tagName) > 0 {
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil && !repo_model.IsErrReleaseNotExist(err) {
+ ctx.ServerError("GetRelease", err)
+ return
+ }
+
+ if rel != nil {
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+ ctx.Data["tag_name"] = rel.TagName
+ if rel.Target != "" {
+ ctx.Data["tag_target"] = rel.Target
+ }
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["attachments"] = rel.Attachments
+ }
+ }
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ upload.AddUploadContext(ctx, "release")
+
+ // For New Release page
+ PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ // We set the value of the hide_archive_links checkbox depending on the latest release
+ latestRelease, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.Data["hide_archive_links"] = false
+ } else {
+ ctx.ServerError("GetLatestReleaseByRepoID", err)
+ return
+ }
+ }
+ if latestRelease != nil {
+ ctx.Data["hide_archive_links"] = latestRelease.HideArchiveLinks
+ }
+
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+}
+
+// NewReleasePost handles the POST request for creating a release
+func NewReleasePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewReleaseForm)
+ ctx.Data["Title"] = ctx.Tr("repo.release.new_release")
+ ctx.Data["PageIsReleaseList"] = true
+
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetTagNamesByRepoID", err)
+ return
+ }
+ ctx.Data["Tags"] = tags
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+ return
+ }
+
+ objectFormat, _ := ctx.Repo.GitRepo.GetObjectFormat()
+
+ // form.Target can be a branch name or a full commitID.
+ if !ctx.Repo.GitRepo.IsBranchExist(form.Target) &&
+ len(form.Target) == objectFormat.FullLength() && !ctx.Repo.GitRepo.IsCommitExist(form.Target) {
+ ctx.RenderWithErr(ctx.Tr("form.target_branch_not_exist"), tplReleaseNew, &form)
+ return
+ }
+
+ // Title of release cannot be empty
+ if len(form.TagOnly) == 0 && len(form.Title) == 0 {
+ ctx.RenderWithErr(ctx.Tr("repo.release.title_empty"), tplReleaseNew, &form)
+ return
+ }
+
+ attachmentChanges := make(container.Set[*releaseservice.AttachmentChange])
+ attachmentChangesByID := make(map[string]*releaseservice.AttachmentChange)
+
+ if setting.Attachment.Enabled {
+ for _, uuid := range form.Files {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "attachment",
+ UUID: uuid,
+ })
+ }
+
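+ // External attachments are submitted as paired form fields attachment-new-name-<id> and
+ // attachment-new-exturl-<id>; fields sharing the same id are merged into a single "add" change.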
+ const namePrefix = "attachment-new-name-"
+ const exturlPrefix = "attachment-new-exturl-"
+ for k, v := range ctx.Req.Form {
+ isNewName := strings.HasPrefix(k, namePrefix)
+ isNewExturl := strings.HasPrefix(k, exturlPrefix)
+ if isNewName || isNewExturl {
+ var id string
+ if isNewName {
+ id = k[len(namePrefix):]
+ } else if isNewExturl {
+ id = k[len(exturlPrefix):]
+ }
+ if _, ok := attachmentChangesByID[id]; !ok {
+ attachmentChangesByID[id] = &releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "external",
+ }
+ attachmentChanges.Add(attachmentChangesByID[id])
+ }
+ if isNewName {
+ attachmentChangesByID[id].Name = v[0]
+ } else if isNewExturl {
+ attachmentChangesByID[id].ExternalURL = v[0]
+ }
+ }
+ }
+ }
+
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, form.TagName)
+ if err != nil {
+ if !repo_model.IsErrReleaseNotExist(err) {
+ ctx.ServerError("GetRelease", err)
+ return
+ }
+
+ msg := ""
+ if len(form.Title) > 0 && form.AddTagMsg {
+ msg = form.Title + "\n\n" + form.Content
+ }
+
+ if len(form.TagOnly) > 0 {
+ if err = releaseservice.CreateNewTag(ctx, ctx.Doer, ctx.Repo.Repository, form.Target, form.TagName, msg); err != nil {
+ if models.IsErrTagAlreadyExists(err) {
+ e := err.(models.ErrTagAlreadyExists)
+ ctx.Flash.Error(ctx.Tr("repo.branch.tag_collision", e.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrInvalidTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_invalid"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL())
+ return
+ }
+
+ ctx.ServerError("releaseservice.CreateNewTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.tag.create_success", form.TagName))
+ ctx.Redirect(ctx.Repo.RepoLink + "/src/tag/" + util.PathEscapeSegments(form.TagName))
+ return
+ }
+
+ rel = &repo_model.Release{
+ RepoID: ctx.Repo.Repository.ID,
+ Repo: ctx.Repo.Repository,
+ PublisherID: ctx.Doer.ID,
+ Publisher: ctx.Doer,
+ Title: form.Title,
+ TagName: form.TagName,
+ Target: form.Target,
+ Note: form.Content,
+ IsDraft: len(form.Draft) > 0,
+ IsPrerelease: form.Prerelease,
+ HideArchiveLinks: form.HideArchiveLinks,
+ IsTag: false,
+ }
+
+ if err = releaseservice.CreateRelease(ctx.Repo.GitRepo, rel, msg, attachmentChanges.Values()); err != nil {
+ ctx.Data["Err_TagName"] = true
+ switch {
+ case repo_model.IsErrReleaseAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_already_exist"), tplReleaseNew, &form)
+ case models.IsErrInvalidTagName(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_invalid"), tplReleaseNew, &form)
+ case models.IsErrProtectedTagName(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_protected"), tplReleaseNew, &form)
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("CreateRelease", err)
+ }
+ return
+ }
+ } else {
+ if !rel.IsTag {
+ ctx.Data["Err_TagName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.release.tag_name_already_exist"), tplReleaseNew, &form)
+ return
+ }
+
+ rel.Title = form.Title
+ rel.Note = form.Content
+ rel.Target = form.Target
+ rel.IsDraft = len(form.Draft) > 0
+ rel.IsPrerelease = form.Prerelease
+ rel.PublisherID = ctx.Doer.ID
+ rel.HideArchiveLinks = form.HideArchiveLinks
+ rel.IsTag = false
+
+ if err = releaseservice.UpdateRelease(ctx, ctx.Doer, ctx.Repo.GitRepo, rel, true, attachmentChanges.Values()); err != nil {
+ ctx.Data["Err_TagName"] = true
+ switch {
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("UpdateRelease", err)
+ }
+ return
+ }
+ }
+ log.Trace("Release created: %s/%s:%s", ctx.Doer.LowerName, ctx.Repo.Repository.Name, form.TagName)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/releases")
+}
+
+// EditRelease renders the release edit page
+func EditRelease(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.release.edit_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["PageIsEditRelease"] = true
+ ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
+ upload.AddUploadContext(ctx, "release")
+
+ tagName := ctx.Params("*")
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetRelease", err)
+ } else {
+ ctx.ServerError("GetRelease", err)
+ }
+ return
+ }
+ ctx.Data["ID"] = rel.ID
+ ctx.Data["tag_name"] = rel.TagName
+ ctx.Data["tag_target"] = rel.Target
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["prerelease"] = rel.IsPrerelease
+ ctx.Data["hide_archive_links"] = rel.HideArchiveLinks
+ ctx.Data["IsDraft"] = rel.IsDraft
+
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ ctx.Data["attachments"] = rel.Attachments
+
+ // Get assignees.
+ assigneeUsers, err := repo_model.GetRepoAssignees(ctx, rel.Repo)
+ if err != nil {
+ ctx.ServerError("GetRepoAssignees", err)
+ return
+ }
+ ctx.Data["Assignees"] = MakeSelfOnTop(ctx.Doer, assigneeUsers)
+
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+}
+
+// EditReleasePost handles the POST request for editing a release
+func EditReleasePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditReleaseForm)
+ ctx.Data["Title"] = ctx.Tr("repo.release.edit_release")
+ ctx.Data["PageIsReleaseList"] = true
+ ctx.Data["PageIsEditRelease"] = true
+
+ tagName := ctx.Params("*")
+ rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, tagName)
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetRelease", err)
+ } else {
+ ctx.ServerError("GetRelease", err)
+ }
+ return
+ }
+ if rel.IsTag {
+ ctx.NotFound("GetRelease", err)
+ return
+ }
+ ctx.Data["tag_name"] = rel.TagName
+ ctx.Data["tag_target"] = rel.Target
+ ctx.Data["title"] = rel.Title
+ ctx.Data["content"] = rel.Note
+ ctx.Data["prerelease"] = rel.IsPrerelease
+ ctx.Data["hide_archive_links"] = rel.HideArchiveLinks
+
+ rel.Repo = ctx.Repo.Repository
+ if err := rel.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ // TODO: If an error occurs, do not forget the attachment edits the user made
+ // when displaying the error message.
+ ctx.Data["attachments"] = rel.Attachments
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplReleaseNew)
+ return
+ }
+
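+ // Attachment edits arrive as form fields keyed by the prefixes below: deletions carry the attachment
+ // UUID directly, while name/external-URL updates and additions are merged per UUID into one change.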
+ const delPrefix = "attachment-del-"
+ const editPrefix = "attachment-edit-"
+ const newPrefix = "attachment-new-"
+ const namePrefix = "name-"
+ const exturlPrefix = "exturl-"
+ attachmentChanges := make(container.Set[*releaseservice.AttachmentChange])
+ attachmentChangesByID := make(map[string]*releaseservice.AttachmentChange)
+
+ if setting.Attachment.Enabled {
+ for _, uuid := range form.Files {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "add",
+ Type: "attachment",
+ UUID: uuid,
+ })
+ }
+
+ for k, v := range ctx.Req.Form {
+ if strings.HasPrefix(k, delPrefix) && v[0] == "true" {
+ attachmentChanges.Add(&releaseservice.AttachmentChange{
+ Action: "delete",
+ UUID: k[len(delPrefix):],
+ })
+ } else {
+ isUpdatedName := strings.HasPrefix(k, editPrefix+namePrefix)
+ isUpdatedExturl := strings.HasPrefix(k, editPrefix+exturlPrefix)
+ isNewName := strings.HasPrefix(k, newPrefix+namePrefix)
+ isNewExturl := strings.HasPrefix(k, newPrefix+exturlPrefix)
+
+ if isUpdatedName || isUpdatedExturl || isNewName || isNewExturl {
+ var uuid string
+
+ if isUpdatedName {
+ uuid = k[len(editPrefix+namePrefix):]
+ } else if isUpdatedExturl {
+ uuid = k[len(editPrefix+exturlPrefix):]
+ } else if isNewName {
+ uuid = k[len(newPrefix+namePrefix):]
+ } else if isNewExturl {
+ uuid = k[len(newPrefix+exturlPrefix):]
+ }
+
+ if _, ok := attachmentChangesByID[uuid]; !ok {
+ attachmentChangesByID[uuid] = &releaseservice.AttachmentChange{
+ Type: "attachment",
+ UUID: uuid,
+ }
+ attachmentChanges.Add(attachmentChangesByID[uuid])
+ }
+
+ if isUpdatedName || isUpdatedExturl {
+ attachmentChangesByID[uuid].Action = "update"
+ } else if isNewName || isNewExturl {
+ attachmentChangesByID[uuid].Action = "add"
+ }
+
+ if isUpdatedName || isNewName {
+ attachmentChangesByID[uuid].Name = v[0]
+ } else if isUpdatedExturl || isNewExturl {
+ attachmentChangesByID[uuid].ExternalURL = v[0]
+ attachmentChangesByID[uuid].Type = "external"
+ }
+ }
+ }
+ }
+ }
+
+ rel.Title = form.Title
+ rel.Note = form.Content
+ rel.IsDraft = len(form.Draft) > 0
+ rel.IsPrerelease = form.Prerelease
+ rel.HideArchiveLinks = form.HideArchiveLinks
+ if err = releaseservice.UpdateRelease(ctx, ctx.Doer, ctx.Repo.GitRepo, rel, false, attachmentChanges.Values()); err != nil {
+ switch {
+ case repo_model.IsErrInvalidExternalURL(err):
+ ctx.RenderWithErr(ctx.Tr("repo.release.invalid_external_url", err.(repo_model.ErrInvalidExternalURL).ExternalURL), tplReleaseNew, &form)
+ default:
+ ctx.ServerError("UpdateRelease", err)
+ }
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/releases")
+}
+
+// DeleteRelease deletes a release
+func DeleteRelease(ctx *context.Context) {
+ deleteReleaseOrTag(ctx, false)
+}
+
+// DeleteTag deletes a tag
+func DeleteTag(ctx *context.Context) {
+ deleteReleaseOrTag(ctx, true)
+}
+
+func deleteReleaseOrTag(ctx *context.Context, isDelTag bool) {
+ redirect := func() {
+ if isDelTag {
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/tags")
+ return
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/releases")
+ }
+
+ rel, err := repo_model.GetReleaseForRepoByID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id"))
+ if err != nil {
+ if repo_model.IsErrReleaseNotExist(err) {
+ ctx.NotFound("GetReleaseForRepoByID", err)
+ } else {
+ ctx.Flash.Error("DeleteReleaseByID: " + err.Error())
+ redirect()
+ }
+ return
+ }
+
+ if err := releaseservice.DeleteReleaseByID(ctx, ctx.Repo.Repository, rel, ctx.Doer, isDelTag); err != nil {
+ if models.IsErrProtectedTagName(err) {
+ ctx.Flash.Error(ctx.Tr("repo.release.tag_name_protected"))
+ } else {
+ ctx.Flash.Error("DeleteReleaseByID: " + err.Error())
+ }
+ } else {
+ if isDelTag {
+ ctx.Flash.Success(ctx.Tr("repo.release.deletion_tag_success"))
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.release.deletion_success"))
+ }
+ }
+
+ redirect()
+}
diff --git a/routers/web/repo/release_test.go b/routers/web/repo/release_test.go
new file mode 100644
index 0000000..5c7b6e2
--- /dev/null
+++ b/routers/web/repo/release_test.go
@@ -0,0 +1,124 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestNewReleasePost(t *testing.T) {
+ for _, testCase := range []struct {
+ RepoID int64
+ UserID int64
+ TagName string
+ Form forms.NewReleaseForm
+ }{
+ {
+ RepoID: 1,
+ UserID: 2,
+ TagName: "v1.1", // pre-existing tag
+ Form: forms.NewReleaseForm{
+ TagName: "newtag",
+ Target: "master",
+ Title: "title",
+ Content: "content",
+ },
+ },
+ {
+ RepoID: 1,
+ UserID: 2,
+ TagName: "newtag",
+ Form: forms.NewReleaseForm{
+ TagName: "newtag",
+ Target: "master",
+ Title: "title",
+ Content: "content",
+ },
+ },
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/releases/new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ web.SetForm(ctx, &testCase.Form)
+ NewReleasePost(ctx)
+ unittest.AssertExistsAndLoadBean(t, &repo_model.Release{
+ RepoID: 1,
+ PublisherID: 2,
+ TagName: testCase.Form.TagName,
+ Target: testCase.Form.Target,
+ Title: testCase.Form.Title,
+ Note: testCase.Form.Content,
+ }, unittest.Cond("is_draft=?", len(testCase.Form.Draft) > 0))
+ ctx.Repo.GitRepo.Close()
+ }
+}
+
+func TestCalReleaseNumCommitsBehind(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo-release/releases")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 57)
+ contexttest.LoadGitRepo(t, ctx)
+ t.Cleanup(func() { ctx.Repo.GitRepo.Close() })
+
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: ctx.Repo.CanWrite(unit.TypeReleases),
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ require.NoError(t, err)
+
+ countCache := make(map[string]int64)
+ for _, release := range releases {
+ err := calReleaseNumCommitsBehind(ctx.Repo, release, countCache)
+ require.NoError(t, err)
+ }
+
+ type computedFields struct {
+ NumCommitsBehind int64
+ TargetBehind string
+ }
+ expectedComputation := map[string]computedFields{
+ "v1.0": {
+ NumCommitsBehind: 3,
+ TargetBehind: "main",
+ },
+ "v1.1": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ "v2.0": {
+ NumCommitsBehind: 0,
+ TargetBehind: "main",
+ },
+ "non-existing-target-branch": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ "empty-target-branch": {
+ NumCommitsBehind: 1,
+ TargetBehind: "main",
+ },
+ }
+ for _, r := range releases {
+ actual := computedFields{
+ NumCommitsBehind: r.NumCommitsBehind,
+ TargetBehind: r.TargetBehind,
+ }
+ assert.Equal(t, expectedComputation[r.TagName], actual, "wrong computed fields for %s: %#v", r.TagName, r)
+ }
+}
diff --git a/routers/web/repo/render.go b/routers/web/repo/render.go
new file mode 100644
index 0000000..e64db03
--- /dev/null
+++ b/routers/web/repo/render.go
@@ -0,0 +1,76 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "io"
+ "net/http"
+ "path"
+
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+// RenderFile renders a file by its repo path
+func RenderFile(ctx *context.Context) {
+ blob, err := ctx.Repo.Commit.GetBlobByPath(ctx.Repo.TreePath)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("GetBlobByPath", err)
+ } else {
+ ctx.ServerError("GetBlobByPath", err)
+ }
+ return
+ }
+
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ ctx.ServerError("DataAsync", err)
+ return
+ }
+ defer dataRc.Close()
+
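+ // Sniff the first 1KB of the blob to decide whether it is a text file.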
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ isTextFile := st.IsText()
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
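+ // Restrict the rendered document with a CSP: it may only be framed by this site
+ // and runs in a sandbox that only allows scripts.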
+ ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'; sandbox allow-scripts")
+
+ if markupType := markup.Type(blob.Name()); markupType == "" {
+ if isTextFile {
+ _, _ = io.Copy(ctx.Resp, rd)
+ } else {
+ http.Error(ctx.Resp, "Unsupported file type render", http.StatusInternalServerError)
+ }
+ return
+ }
+
+ err = markup.Render(&markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ InStandalonePage: true,
+ }, rd, ctx.Resp)
+ if err != nil {
+ log.Error("Failed to render file %q: %v", ctx.Repo.TreePath, err)
+ http.Error(ctx.Resp, "Failed to render file", http.StatusInternalServerError)
+ return
+ }
+}
diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go
new file mode 100644
index 0000000..9562491
--- /dev/null
+++ b/routers/web/repo/repo.go
@@ -0,0 +1,774 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "slices"
+ "strings"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/cache"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+ archiver_service "code.gitea.io/gitea/services/repository/archiver"
+ commitstatus_service "code.gitea.io/gitea/services/repository/commitstatus"
+)
+
+const (
+ tplCreate base.TplName = "repo/create"
+ tplAlertDetails base.TplName = "base/alert_details"
+)
+
+// MustBeNotEmpty responds with not found when the repository is an empty git dir
+func MustBeNotEmpty(ctx *context.Context) {
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.NotFound("MustBeNotEmpty", nil)
+ }
+}
+
+// MustBeEditable checks that the repo can be edited
+func MustBeEditable(ctx *context.Context) {
+ if !ctx.Repo.Repository.CanEnableEditor() || ctx.Repo.IsViewCommit {
+ ctx.NotFound("", nil)
+ return
+ }
+}
+
+// MustBeAbleToUpload checks that files can be uploaded to the repo
+func MustBeAbleToUpload(ctx *context.Context) {
+ if !setting.Repository.Upload.Enabled {
+ ctx.NotFound("", nil)
+ }
+}
+
+func CommitInfoCache(ctx *context.Context) {
+ var err error
+ ctx.Repo.Commit, err = ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("GetBranchCommit", err)
+ return
+ }
+ ctx.Repo.CommitsCount, err = ctx.Repo.GetCommitsCount()
+ if err != nil {
+ ctx.ServerError("GetCommitsCount", err)
+ return
+ }
+ ctx.Data["CommitsCount"] = ctx.Repo.CommitsCount
+ ctx.Repo.GitRepo.LastCommitCache = git.NewLastCommitCache(ctx.Repo.CommitsCount, ctx.Repo.Repository.FullName(), ctx.Repo.GitRepo, cache.GetCache())
+}
+
+func checkContextUser(ctx *context.Context, uid int64) *user_model.User {
+ orgs, err := organization.GetOrgsCanCreateRepoByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetOrgsCanCreateRepoByUserID", err)
+ return nil
+ }
+
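+ // Non-admins may only choose among organizations in which they are allowed to create repositories.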
+ if !ctx.Doer.IsAdmin {
+ orgsAvailable := []*organization.Organization{}
+ for i := 0; i < len(orgs); i++ {
+ if orgs[i].CanCreateRepo() {
+ orgsAvailable = append(orgsAvailable, orgs[i])
+ }
+ }
+ ctx.Data["Orgs"] = orgsAvailable
+ } else {
+ ctx.Data["Orgs"] = orgs
+ }
+
+ // A UID different from the signed-in user's means the target owner is expected to be an organization.
+ if uid == ctx.Doer.ID || uid == 0 {
+ return ctx.Doer
+ }
+
+ org, err := user_model.GetUserByID(ctx, uid)
+ if user_model.IsErrUserNotExist(err) {
+ return ctx.Doer
+ }
+
+ if err != nil {
+ ctx.ServerError("GetUserByID", fmt.Errorf("[%d]: %w", uid, err))
+ return nil
+ }
+
+ // Check ownership of organization.
+ if !org.IsOrganization() {
+ ctx.Error(http.StatusForbidden)
+ return nil
+ }
+ if !ctx.Doer.IsAdmin {
+ canCreate, err := organization.OrgFromUser(org).CanCreateOrgRepo(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("CanCreateOrgRepo", err)
+ return nil
+ } else if !canCreate {
+ ctx.Error(http.StatusForbidden)
+ return nil
+ }
+ } else {
+ ctx.Data["Orgs"] = orgs
+ }
+ return org
+}
+
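+// getRepoPrivate resolves the default visibility for a new repository from the
+// instance setting, falling back to the user's last chosen visibility.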
+func getRepoPrivate(ctx *context.Context) bool {
+ switch strings.ToLower(setting.Repository.DefaultPrivate) {
+ case setting.RepoCreatingLastUserVisibility:
+ return ctx.Doer.LastRepoVisibility
+ case setting.RepoCreatingPrivate:
+ return true
+ case setting.RepoCreatingPublic:
+ return false
+ default:
+ return ctx.Doer.LastRepoVisibility
+ }
+}
+
+// Create renders the "create repository" page
+func Create(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("new_repo.title")
+
+ // Give default value for template to render.
+ ctx.Data["Gitignores"] = repo_module.Gitignores
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.Data["Licenses"] = repo_module.Licenses
+ ctx.Data["Readmes"] = repo_module.Readmes
+ ctx.Data["readme"] = "Default"
+ ctx.Data["private"] = getRepoPrivate(ctx)
+ ctx.Data["IsForcedPrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["default_branch"] = setting.Repository.DefaultBranch
+
+ ctxUser := checkContextUser(ctx, ctx.FormInt64("org"))
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ ctx.Data["repo_template_name"] = ctx.Tr("repo.template_select")
+ templateID := ctx.FormInt64("template_id")
+ if templateID > 0 {
+ templateRepo, err := repo_model.GetRepositoryByID(ctx, templateID)
+ if err == nil && access_model.CheckRepoUnitUser(ctx, templateRepo, ctxUser, unit.TypeCode) {
+ ctx.Data["repo_template"] = templateID
+ ctx.Data["repo_template_name"] = templateRepo.Name
+ }
+ }
+
+ ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo()
+ ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit()
+ ctx.Data["SupportedObjectFormats"] = git.SupportedObjectFormats
+ ctx.Data["DefaultObjectFormat"] = git.Sha1ObjectFormat
+
+ ctx.HTML(http.StatusOK, tplCreate)
+}
+
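+// handleCreateError renders a form error for the known repository creation
+// failures and falls back to a server error otherwise.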
+func handleCreateError(ctx *context.Context, owner *user_model.User, err error, name string, tpl base.TplName, form any) {
+ switch {
+ case repo_model.IsErrReachLimitOfRepo(err):
+ maxCreationLimit := owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.RenderWithErr(msg, tpl, form)
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tpl, form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tpl, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tpl, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tpl, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tpl, form)
+ }
+ case db.IsErrNameReserved(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tpl, form)
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Data["Err_RepoName"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tpl, form)
+ default:
+ ctx.ServerError(name, err)
+ }
+}
+
+// CreatePost handles the POST request for creating a repository
+func CreatePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.CreateRepoForm)
+ ctx.Data["Title"] = ctx.Tr("new_repo.title")
+
+ ctx.Data["Gitignores"] = repo_module.Gitignores
+ ctx.Data["LabelTemplateFiles"] = repo_module.LabelTemplateFiles
+ ctx.Data["Licenses"] = repo_module.Licenses
+ ctx.Data["Readmes"] = repo_module.Readmes
+
+ ctx.Data["CanCreateRepo"] = ctx.Doer.CanCreateRepo()
+ ctx.Data["MaxCreationLimit"] = ctx.Doer.MaxCreationLimit()
+ ctx.Data["SupportedObjectFormats"] = git.SupportedObjectFormats
+ ctx.Data["DefaultObjectFormat"] = git.Sha1ObjectFormat
+
+ ctxUser := checkContextUser(ctx, form.UID)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
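+ // Repository creation counts against the owner's total repository size quota.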
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctxUser.ID, ctxUser.Name) {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplCreate)
+ return
+ }
+
+ var repo *repo_model.Repository
+ var err error
+ if form.RepoTemplate > 0 {
+ opts := repo_service.GenerateRepoOptions{
+ Name: form.RepoName,
+ Description: form.Description,
+ Private: form.Private || setting.Repository.ForcePrivate,
+ GitContent: form.GitContent,
+ Topics: form.Topics,
+ GitHooks: form.GitHooks,
+ Webhooks: form.Webhooks,
+ Avatar: form.Avatar,
+ IssueLabels: form.Labels,
+ ProtectedBranch: form.ProtectedBranch,
+ }
+
+ if !opts.IsValid() {
+ ctx.RenderWithErr(ctx.Tr("repo.template.one_item"), tplCreate, form)
+ return
+ }
+
+ templateRepo := getRepository(ctx, form.RepoTemplate)
+ if ctx.Written() {
+ return
+ }
+
+ if !templateRepo.IsTemplate {
+ ctx.RenderWithErr(ctx.Tr("repo.template.invalid"), tplCreate, form)
+ return
+ }
+
+ repo, err = repo_service.GenerateRepository(ctx, ctx.Doer, ctxUser, templateRepo, opts)
+ if err == nil {
+ log.Trace("Repository generated [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(repo.Link())
+ return
+ }
+ } else {
+ repo, err = repo_service.CreateRepository(ctx, ctx.Doer, ctxUser, repo_service.CreateRepoOptions{
+ Name: form.RepoName,
+ Description: form.Description,
+ Gitignores: form.Gitignores,
+ IssueLabels: form.IssueLabels,
+ License: form.License,
+ Readme: form.Readme,
+ IsPrivate: form.Private || setting.Repository.ForcePrivate,
+ DefaultBranch: form.DefaultBranch,
+ AutoInit: form.AutoInit,
+ IsTemplate: form.Template,
+ TrustModel: repo_model.DefaultTrustModel,
+ ObjectFormatName: form.ObjectFormatName,
+ })
+ if err == nil {
+ log.Trace("Repository created [%d]: %s/%s", repo.ID, ctxUser.Name, repo.Name)
+ ctx.Redirect(repo.Link())
+ return
+ }
+ }
+
+ handleCreateError(ctx, ctxUser, err, "CreatePost", tplCreate, &form)
+}
+
+const (
+ tplWatchUnwatch base.TplName = "repo/watch_unwatch"
+ tplStarUnstar base.TplName = "repo/star_unstar"
+)
+
+func ActionWatch(watch bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ err := repo_model.WatchRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID, watch)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (watch, %t)", watch), err)
+ return
+ }
+
+ ctx.Data["IsWatchingRepo"] = repo_model.IsWatching(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+
+ // we have to reload the repository because NumStars or NumWatching (used in the templates) has just changed
+ ctx.Data["Repository"], err = repo_model.GetRepositoryByName(ctx, ctx.Repo.Repository.OwnerID, ctx.Repo.Repository.Name)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (watch, %t)", watch), err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplWatchUnwatch)
+ }
+}
+
+func ActionStar(star bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ err := repo_service.StarRepoAndSendLikeActivities(ctx, *ctx.Doer, ctx.Repo.Repository.ID, star)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (star, %t)", star), err)
+ return
+ }
+
+ ctx.Data["IsStaringRepo"] = repo_model.IsStaring(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+
+ // we have to reload the repository because NumStars or NumWatching (used in the templates) has just changed
+ ctx.Data["Repository"], err = repo_model.GetRepositoryByName(ctx, ctx.Repo.Repository.OwnerID, ctx.Repo.Repository.Name)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (star, %t)", star), err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplStarUnstar)
+ }
+}
+
+func ActionTransfer(accept bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ var action string
+ if accept {
+ action = "accept_transfer"
+ } else {
+ action = "reject_transfer"
+ }
+
+ ok, err := acceptOrRejectRepoTransfer(ctx, accept)
+ if err != nil {
+ ctx.ServerError(fmt.Sprintf("Action (%s)", action), err)
+ return
+ }
+ if !ok {
+ return
+ }
+
+ ctx.RedirectToFirst(ctx.FormString("redirect_to"), ctx.Repo.RepoLink)
+ }
+}
+
+func acceptOrRejectRepoTransfer(ctx *context.Context, accept bool) (bool, error) {
+ repoTransfer, err := models.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
+ if err != nil {
+ return false, err
+ }
+
+ if err := repoTransfer.LoadAttributes(ctx); err != nil {
+ return false, err
+ }
+
+ if !repoTransfer.CanUserAcceptTransfer(ctx, ctx.Doer) {
+ return false, errors.New("user does not have enough permissions")
+ }
+
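+ // Accepting consumes quota on the recipient's side and requires closing the open
+ // git repository handle before ownership is transferred.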
+ if accept {
+ if !ctx.CheckQuota(quota_model.LimitSubjectSizeReposAll, ctx.Doer.ID, ctx.Doer.Name) {
+ return false, nil
+ }
+
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+
+ if err := repo_service.TransferOwnership(ctx, repoTransfer.Doer, repoTransfer.Recipient, ctx.Repo.Repository, repoTransfer.Teams); err != nil {
+ return false, err
+ }
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer.success"))
+ } else {
+ if err := repo_service.CancelRepositoryTransfer(ctx, ctx.Repo.Repository); err != nil {
+ return false, err
+ }
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer.rejected"))
+ }
+
+ ctx.Redirect(ctx.Repo.Repository.Link())
+ return true, nil
+}
+
+// RedirectDownload redirects to the release attachment identified by the tag name and file name in the URL
+func RedirectDownload(ctx *context.Context) {
+ var (
+ vTag = ctx.Params("vTag")
+ fileName = ctx.Params("fileName")
+ )
+ tagNames := []string{vTag}
+ curRepo := ctx.Repo.Repository
+ releases, err := db.Find[repo_model.Release](ctx, repo_model.FindReleasesOptions{
+ IncludeDrafts: ctx.Repo.CanWrite(unit.TypeReleases),
+ RepoID: curRepo.ID,
+ TagNames: tagNames,
+ })
+ if err != nil {
+ ctx.ServerError("RedirectDownload", err)
+ return
+ }
+ if len(releases) == 1 {
+ release := releases[0]
+ att, err := repo_model.GetAttachmentByReleaseIDFileName(ctx, release.ID, fileName)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if att != nil {
+ ServeAttachment(ctx, att.UUID)
+ return
+ }
+ } else if len(releases) == 0 && vTag == "latest" {
+ // GitHub supports the alias "latest" for the latest release
+ // We only fetch the latest release if the tag is "latest" and no release with the tag "latest" exists
+ release, err := repo_model.GetLatestReleaseByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ att, err := repo_model.GetAttachmentByReleaseIDFileName(ctx, release.ID, fileName)
+ if err != nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if att != nil {
+ ServeAttachment(ctx, att.UUID)
+ return
+ }
+ }
+ ctx.Error(http.StatusNotFound)
+}
+
+// Download serves an archive of a repository
+func Download(ctx *context.Context) {
+ uri := ctx.Params("*")
+ aReq, err := archiver_service.NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri)
+ if err != nil {
+ if errors.Is(err, archiver_service.ErrUnknownArchiveFormat{}) {
+ ctx.Error(http.StatusBadRequest, err.Error())
+ } else if errors.Is(err, archiver_service.RepoRefNotFoundError{}) {
+ ctx.Error(http.StatusNotFound, err.Error())
+ } else {
+ ctx.ServerError("archiver_service.NewRequest", err)
+ }
+ return
+ }
+
+ archiver, err := aReq.Await(ctx)
+ if err != nil {
+ ctx.ServerError("archiver.Await", err)
+ return
+ }
+
+ download(ctx, aReq.GetArchiveName(), archiver)
+}
+
+func download(ctx *context.Context, archiveName string, archiver *repo_model.RepoArchiver) {
+ downloadName := ctx.Repo.Repository.Name + "-" + archiveName
+
+ // Add nix format link header so tarballs lock correctly:
+ // https://github.com/nixos/nix/blob/56763ff918eb308db23080e560ed2ea3e00c80a7/doc/manual/src/protocols/tarball-fetcher.md
+ ctx.Resp.Header().Add("Link", fmt.Sprintf("<%s/archive/%s.tar.gz?rev=%s>; rel=\"immutable\"",
+ ctx.Repo.Repository.APIURL(),
+ archiver.CommitID, archiver.CommitID))
+
+ rPath := archiver.RelativePath()
+ if setting.RepoArchive.Storage.MinioConfig.ServeDirect {
+ // If we have a signed url (S3, object storage), redirect to this directly.
+ u, err := storage.RepoArchives.URL(rPath, downloadName)
+ if u != nil && err == nil {
+ if archiver.ReleaseID != 0 {
+ err = repo_model.CountArchiveDownload(ctx, ctx.Repo.Repository.ID, archiver.ReleaseID, archiver.Type)
+ if err != nil {
+ ctx.ServerError("CountArchiveDownload", err)
+ return
+ }
+ }
+
+ ctx.Redirect(u.String())
+ return
+ }
+ }
+
+ // Otherwise serve the archive directly from the archive storage.
+ fr, err := storage.RepoArchives.Open(rPath)
+ if err != nil {
+ ctx.ServerError("Open", err)
+ return
+ }
+ defer fr.Close()
+
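+ // Archive downloads are only counted when the archive belongs to a release.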
+ if archiver.ReleaseID != 0 {
+ err = repo_model.CountArchiveDownload(ctx, ctx.Repo.Repository.ID, archiver.ReleaseID, archiver.Type)
+ if err != nil {
+ ctx.ServerError("CountArchiveDownload", err)
+ return
+ }
+ }
+
+ ctx.ServeContent(fr, &context.ServeHeaderOptions{
+ Filename: downloadName,
+ LastModified: archiver.CreatedUnix.AsLocalTime(),
+ })
+}
+
+// InitiateDownload enqueues an archival request as needed. It may submit
+// a request that is already in progress; in that case the archiver service
+// simply ignores the duplicate.
+func InitiateDownload(ctx *context.Context) {
+ uri := ctx.Params("*")
+ aReq, err := archiver_service.NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, uri)
+ if err != nil {
+ ctx.ServerError("archiver_service.NewRequest", err)
+ return
+ }
+ if aReq == nil {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ archiver, err := repo_model.GetRepoArchiver(ctx, aReq.RepoID, aReq.Type, aReq.CommitID)
+ if err != nil {
+ ctx.ServerError("archiver_service.StartArchive", err)
+ return
+ }
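+ // Start (or re-submit) the archival job unless a ready archive already exists;
+ // duplicate submissions are harmless.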
+ if archiver == nil || archiver.Status != repo_model.ArchiverReady {
+ if err := archiver_service.StartArchive(aReq); err != nil {
+ ctx.ServerError("archiver_service.StartArchive", err)
+ return
+ }
+ }
+
+ var completed bool
+ if archiver != nil && archiver.Status == repo_model.ArchiverReady {
+ completed = true
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "complete": completed,
+ })
+}
+
+// SearchRepo searches repositories according to the given options
+func SearchRepo(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+ opts := &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ },
+ Actor: ctx.Doer,
+ Keyword: ctx.FormTrim("q"),
+ OwnerID: ctx.FormInt64("uid"),
+ PriorityOwnerID: ctx.FormInt64("priority_owner_id"),
+ TeamID: ctx.FormInt64("team_id"),
+ TopicOnly: ctx.FormBool("topic"),
+ Collaborate: optional.None[bool](),
+ Private: ctx.IsSigned && (ctx.FormString("private") == "" || ctx.FormBool("private")),
+ Template: optional.None[bool](),
+ StarredByID: ctx.FormInt64("starredBy"),
+ IncludeDescription: ctx.FormBool("includeDesc"),
+ }
+
+ if ctx.FormString("template") != "" {
+ opts.Template = optional.Some(ctx.FormBool("template"))
+ }
+
+ if ctx.FormBool("exclusive") {
+ opts.Collaborate = optional.Some(false)
+ }
+
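+ // The "mode" parameter narrows the results to source, fork, mirror, or collaborative repositories.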
+ mode := ctx.FormString("mode")
+ switch mode {
+ case "source":
+ opts.Fork = optional.Some(false)
+ opts.Mirror = optional.Some(false)
+ case "fork":
+ opts.Fork = optional.Some(true)
+ case "mirror":
+ opts.Mirror = optional.Some(true)
+ case "collaborative":
+ opts.Mirror = optional.Some(false)
+ opts.Collaborate = optional.Some(true)
+ case "":
+ default:
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid search mode: \"%s\"", mode))
+ return
+ }
+
+ if ctx.FormString("archived") != "" {
+ opts.Archived = optional.Some(ctx.FormBool("archived"))
+ }
+
+ if ctx.FormString("is_private") != "" {
+ opts.IsPrivate = optional.Some(ctx.FormBool("is_private"))
+ }
+
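+ // Map the requested sort field and order onto an ORDER BY value; unknown values are rejected with 422.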
+ sortMode := ctx.FormString("sort")
+ if len(sortMode) > 0 {
+ sortOrder := ctx.FormString("order")
+ if len(sortOrder) == 0 {
+ sortOrder = "asc"
+ }
+ if searchModeMap, ok := repo_model.OrderByMap[sortOrder]; ok {
+ if orderBy, ok := searchModeMap[sortMode]; ok {
+ opts.OrderBy = orderBy
+ } else {
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort mode: \"%s\"", sortMode))
+ return
+ }
+ } else {
+ ctx.Error(http.StatusUnprocessableEntity, fmt.Sprintf("Invalid sort order: \"%s\"", sortOrder))
+ return
+ }
+ }
+
+ // To improve performance when only the count is requested
+ if ctx.FormBool("count_only") {
+ if count, err := repo_model.CountRepository(ctx, opts); err != nil {
+ log.Error("CountRepository: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil) // frontend JS doesn't handle error response (same as below)
+ } else {
+ ctx.SetTotalCountHeader(count)
+ ctx.JSONOK()
+ }
+ return
+ }
+
+ repos, count, err := repo_model.SearchRepository(ctx, opts)
+ if err != nil {
+ log.Error("SearchRepository: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil)
+ return
+ }
+
+ ctx.SetTotalCountHeader(count)
+
+ latestCommitStatuses, err := commitstatus_service.FindReposLastestCommitStatuses(ctx, repos)
+ if err != nil {
+ log.Error("FindReposLastestCommitStatuses: %v", err)
+ ctx.JSON(http.StatusInternalServerError, nil)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, latestCommitStatuses)
+ }
+
+ results := make([]*repo_service.WebSearchRepository, len(repos))
+ for i, repo := range repos {
+ results[i] = &repo_service.WebSearchRepository{
+ Repository: &api.Repository{
+ ID: repo.ID,
+ FullName: repo.FullName(),
+ Fork: repo.IsFork,
+ Private: repo.IsPrivate,
+ Template: repo.IsTemplate,
+ Mirror: repo.IsMirror,
+ Stars: repo.NumStars,
+ HTMLURL: repo.HTMLURL(),
+ Link: repo.Link(),
+ Internal: !repo.IsPrivate && repo.Owner.Visibility == api.VisibleTypePrivate,
+ },
+ }
+
+ if latestCommitStatuses[i] != nil {
+ results[i].LatestCommitStatus = latestCommitStatuses[i]
+ results[i].LocaleLatestCommitStatus = latestCommitStatuses[i].LocaleString(ctx.Locale)
+ }
+ }
+
+ ctx.JSON(http.StatusOK, repo_service.WebSearchResults{
+ OK: true,
+ Data: results,
+ })
+}
+
+type branchTagSearchResponse struct {
+ Results []string `json:"results"`
+}
+
+// GetBranchesList gets the branches of the current repo
+func GetBranchesList(ctx *context.Context) {
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsDeletedBranch: optional.Some(false),
+ ListOptions: db.ListOptionsAll,
+ }
+ branches, err := git_model.FindBranchNames(ctx, branchOpts)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+ resp := &branchTagSearchResponse{}
+ // Always put the default branch at the top if it exists
+ if slices.Contains(branches, ctx.Repo.Repository.DefaultBranch) {
+ branches = util.SliceRemoveAll(branches, ctx.Repo.Repository.DefaultBranch)
+ branches = append([]string{ctx.Repo.Repository.DefaultBranch}, branches...)
+ }
+ resp.Results = branches
+ ctx.JSON(http.StatusOK, resp)
+}
+
+// GetTagList gets the tag list of the current repo
+func GetTagList(ctx *context.Context) {
+ tags, err := repo_model.GetTagNamesByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, err)
+ return
+ }
+ resp := &branchTagSearchResponse{}
+ resp.Results = tags
+ ctx.JSON(http.StatusOK, resp)
+}
+
+func PrepareBranchList(ctx *context.Context) {
+ branchOpts := git_model.FindBranchOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ IsDeletedBranch: optional.Some(false),
+ ListOptions: db.ListOptionsAll,
+ }
+ brs, err := git_model.FindBranchNames(ctx, branchOpts)
+ if err != nil {
+ ctx.ServerError("GetBranches", err)
+ return
+ }
+ // Always put the default branch at the top if it exists
+ if slices.Contains(brs, ctx.Repo.Repository.DefaultBranch) {
+ brs = util.SliceRemoveAll(brs, ctx.Repo.Repository.DefaultBranch)
+ brs = append([]string{ctx.Repo.Repository.DefaultBranch}, brs...)
+ }
+ ctx.Data["Branches"] = brs
+}
diff --git a/routers/web/repo/search.go b/routers/web/repo/search.go
new file mode 100644
index 0000000..c4f9f9a
--- /dev/null
+++ b/routers/web/repo/search.go
@@ -0,0 +1,105 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const tplSearch base.TplName = "repo/search"
+
+// Search renders the repository code search page
+func Search(ctx *context.Context) {
+ language := ctx.FormTrim("l")
+ keyword := ctx.FormTrim("q")
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Language"] = language
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["PageIsViewCode"] = true
+
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplSearch)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ var total int
+ var searchResults []*code_indexer.Result
+ var searchResultLanguages []*code_indexer.SearchResultLanguages
+ if setting.Indexer.RepoIndexerEnabled {
+ var err error
+ total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, &code_indexer.SearchOptions{
+ RepoIDs: []int64{ctx.Repo.Repository.ID},
+ Keyword: keyword,
+ IsKeywordFuzzy: isFuzzy,
+ Language: language,
+ Paginator: &db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.RepoSearchPagingNum,
+ },
+ })
+ if err != nil {
+ if code_indexer.IsAvailable(ctx) {
+ ctx.ServerError("SearchResults", err)
+ return
+ }
+ ctx.Data["CodeIndexerUnavailable"] = true
+ } else {
+ ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+ }
+ } else {
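+ // Without the code indexer, fall back to git grep on the current ref and paginate the matches in memory.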
+ res, err := git.GrepSearch(ctx, ctx.Repo.GitRepo, keyword, git.GrepOptions{
+ ContextLineNumber: 1,
+ IsFuzzy: isFuzzy,
+ RefName: ctx.Repo.RefName,
+ })
+ if err != nil {
+ ctx.ServerError("GrepSearch", err)
+ return
+ }
+ total = len(res)
+ pageStart := min((page-1)*setting.UI.RepoSearchPagingNum, len(res))
+ pageEnd := min(page*setting.UI.RepoSearchPagingNum, len(res))
+ res = res[pageStart:pageEnd]
+ for _, r := range res {
+ searchResults = append(searchResults, &code_indexer.Result{
+ RepoID: ctx.Repo.Repository.ID,
+ Filename: r.Filename,
+ CommitID: ctx.Repo.CommitID,
+ // UpdatedUnix: not supported yet
+ // Language: not supported yet
+ // Color: not supported yet
+ Lines: code_indexer.HighlightSearchResultCode(r.Filename, r.LineNumbers, r.HighlightedRanges, strings.Join(r.LineCodes, "\n")),
+ })
+ }
+ }
+
+ ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
+ ctx.Data["Repo"] = ctx.Repo.Repository
+ ctx.Data["SourcePath"] = ctx.Repo.Repository.Link()
+ ctx.Data["SearchResults"] = searchResults
+ ctx.Data["SearchResultLanguages"] = searchResultLanguages
+
+ pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "l", "Language")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplSearch)
+}
diff --git a/routers/web/repo/setting/avatar.go b/routers/web/repo/setting/avatar.go
new file mode 100644
index 0000000..504f57c
--- /dev/null
+++ b/routers/web/repo/setting/avatar.go
@@ -0,0 +1,76 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "io"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// UpdateAvatarSetting updates the repo's avatar
+func UpdateAvatarSetting(ctx *context.Context, form forms.AvatarForm) error {
+ ctxRepo := ctx.Repo.Repository
+
+ if form.Avatar == nil {
+ // No avatar was uploaded and we are not removing the existing one here.
+ // No random avatar is generated here either.
+ // Just exit; no action.
+ if ctxRepo.CustomAvatarRelativePath() == "" {
+ log.Trace("No avatar was uploaded for repo: %d. Default icon will appear instead.", ctxRepo.ID)
+ }
+ return nil
+ }
+
+ r, err := form.Avatar.Open()
+ if err != nil {
+ return fmt.Errorf("Avatar.Open: %w", err)
+ }
+ defer r.Close()
+
+ if form.Avatar.Size > setting.Avatar.MaxFileSize {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_is_too_big", form.Avatar.Size/1024, setting.Avatar.MaxFileSize/1024))
+ }
+
+ data, err := io.ReadAll(r)
+ if err != nil {
+ return fmt.Errorf("io.ReadAll: %w", err)
+ }
+ st := typesniffer.DetectContentType(data)
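+ // Only raster images are accepted as avatars; SVG uploads are rejected.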
+ if !(st.IsImage() && !st.IsSvgImage()) {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_not_a_image"))
+ }
+ if err = repo_service.UploadAvatar(ctx, ctxRepo, data); err != nil {
+ return fmt.Errorf("UploadAvatar: %w", err)
+ }
+ return nil
+}
+
+// SettingsAvatar saves a newly POSTed repository avatar
+func SettingsAvatar(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AvatarForm)
+ form.Source = forms.AvatarLocal
+ if err := UpdateAvatarSetting(ctx, *form); err != nil {
+ ctx.Flash.Error(err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_avatar_success"))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+}
+
+// SettingsDeleteAvatar deletes the repository avatar
+func SettingsDeleteAvatar(ctx *context.Context) {
+ if err := repo_service.DeleteAvatar(ctx, ctx.Repo.Repository); err != nil {
+ ctx.Flash.Error(fmt.Sprintf("DeleteAvatar: %v", err))
+ }
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings")
+}
diff --git a/routers/web/repo/setting/collaboration.go b/routers/web/repo/setting/collaboration.go
new file mode 100644
index 0000000..75b5515
--- /dev/null
+++ b/routers/web/repo/setting/collaboration.go
@@ -0,0 +1,217 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/mailer"
+ org_service "code.gitea.io/gitea/services/org"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// Collaboration renders a repository's collaboration page
+func Collaboration(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.collaboration")
+ ctx.Data["PageIsSettingsCollaboration"] = true
+
+ users, err := repo_model.GetCollaborators(ctx, ctx.Repo.Repository.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetCollaborators", err)
+ return
+ }
+ ctx.Data["Collaborators"] = users
+
+ teams, err := organization.GetRepoTeams(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("GetRepoTeams", err)
+ return
+ }
+ ctx.Data["Teams"] = teams
+ ctx.Data["Repo"] = ctx.Repo.Repository
+ ctx.Data["OrgID"] = ctx.Repo.Repository.OwnerID
+ ctx.Data["OrgName"] = ctx.Repo.Repository.OwnerName
+ ctx.Data["Org"] = ctx.Repo.Repository.Owner
+ ctx.Data["Units"] = unit_model.Units
+
+ ctx.HTML(http.StatusOK, tplCollaboration)
+}
+
+// CollaborationPost handles adding a collaborator to a repository
+func CollaborationPost(ctx *context.Context) {
+ name := strings.ToLower(ctx.FormString("collaborator"))
+ if len(name) == 0 || ctx.Repo.Owner.LowerName == name {
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
+ u, err := user_model.GetUserByName(ctx, name)
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("form.user_not_exist"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ } else {
+ ctx.ServerError("GetUserByName", err)
+ }
+ return
+ }
+
+ if !u.IsActive {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_inactive_user"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
+ // An organization is not allowed to be added as a collaborator.
+ if u.IsOrganization() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.org_not_allowed_to_be_collaborator"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+
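+ // Reject the request if the user is already a collaborator on this repository.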
+ if got, err := repo_model.IsCollaborator(ctx, ctx.Repo.Repository.ID, u.ID); err == nil && got {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_duplicate"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ // Find the owner team of the organization the repo belongs to and
+ // check if the user we're trying to add is an owner.
+ if ctx.Repo.Repository.Owner.IsOrganization() {
+ if isOwner, err := organization.IsOrganizationOwner(ctx, ctx.Repo.Repository.Owner.ID, u.ID); err != nil {
+ ctx.ServerError("IsOrganizationOwner", err)
+ return
+ } else if isOwner {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_owner"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ return
+ }
+ }
+
+ if err = repo_module.AddCollaborator(ctx, ctx.Repo.Repository, u); err != nil {
+ if !errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.ServerError("AddCollaborator", err)
+ return
+ }
+
+ // To give a good error message, be precise about who has blocked whom.
+ if blockedOurs := user_model.IsBlocked(ctx, ctx.Repo.Repository.OwnerID, u.ID); blockedOurs {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_blocked_our"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_collaborator_blocked_them"))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if setting.Service.EnableNotifyMail {
+ mailer.SendCollaboratorMail(u, ctx.Doer, ctx.Repo.Repository)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_collaborator_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+}
+
+// ChangeCollaborationAccessMode changes the access mode of a collaboration
+func ChangeCollaborationAccessMode(ctx *context.Context) {
+ if err := repo_model.ChangeCollaborationAccessMode(
+ ctx,
+ ctx.Repo.Repository,
+ ctx.FormInt64("uid"),
+ perm.AccessMode(ctx.FormInt("mode"))); err != nil {
+ log.Error("ChangeCollaborationAccessMode: %v", err)
+ }
+}
+
+// DeleteCollaboration removes a collaboration from a repository
+func DeleteCollaboration(ctx *context.Context) {
+ if err := repo_service.DeleteCollaboration(ctx, ctx.Repo.Repository, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteCollaboration: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_collaborator_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
+
+// AddTeamPost handles adding a team to a repository
+func AddTeamPost(ctx *context.Context) {
+ if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ name := strings.ToLower(ctx.FormString("team"))
+ if len(name) == 0 {
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ team, err := organization.OrgFromUser(ctx.Repo.Owner).GetTeam(ctx, name)
+ if err != nil {
+ if organization.IsErrTeamNotExist(err) {
+ ctx.Flash.Error(ctx.Tr("form.team_not_exist"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ } else {
+ ctx.ServerError("GetTeam", err)
+ }
+ return
+ }
+
+ if team.OrgID != ctx.Repo.Repository.OwnerID {
+ ctx.Flash.Error(ctx.Tr("repo.settings.team_not_in_organization"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if organization.HasTeamRepo(ctx, ctx.Repo.Repository.OwnerID, team.ID, ctx.Repo.Repository.ID) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.add_team_duplicate"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ if err = org_service.TeamAddRepository(ctx, team, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("TeamAddRepository", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_team_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
+
+// DeleteTeam removes a team from a repository
+func DeleteTeam(ctx *context.Context) {
+ if !ctx.Repo.Owner.RepoAdminChangeTeamAccess && !ctx.Repo.IsOwner() {
+ ctx.Flash.Error(ctx.Tr("repo.settings.change_team_access_not_allowed"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/collaboration")
+ return
+ }
+
+ team, err := organization.GetTeamByID(ctx, ctx.FormInt64("id"))
+ if err != nil {
+ ctx.ServerError("GetTeamByID", err)
+ return
+ }
+
+ if err = repo_service.RemoveRepositoryFromTeam(ctx, team, ctx.Repo.Repository.ID); err != nil {
+ ctx.ServerError("team.RemoveRepositorys", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_team_success"))
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/collaboration")
+}
diff --git a/routers/web/repo/setting/default_branch.go b/routers/web/repo/setting/default_branch.go
new file mode 100644
index 0000000..881d148
--- /dev/null
+++ b/routers/web/repo/setting/default_branch.go
@@ -0,0 +1,54 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// SetDefaultBranchPost sets the default branch of a repository
+func SetDefaultBranchPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.branches.update_default_branch")
+ ctx.Data["PageIsSettingsBranches"] = true
+
+ repo.PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ repo := ctx.Repo.Repository
+
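+ // Only the "default_branch" action is supported; any other action is a 404.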
+ switch ctx.FormString("action") {
+ case "default_branch":
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplBranches)
+ return
+ }
+
+ branch := ctx.FormString("branch")
+ if err := repo_service.SetRepoDefaultBranch(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, branch); err != nil {
+ switch {
+ case git_model.IsErrBranchNotExist(err):
+ ctx.Status(http.StatusNotFound)
+ default:
+ ctx.ServerError("SetDefaultBranch", err)
+ }
+ return
+ }
+
+ log.Trace("Repository basic settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+ default:
+ ctx.NotFound("", nil)
+ }
+}
diff --git a/routers/web/repo/setting/deploy_key.go b/routers/web/repo/setting/deploy_key.go
new file mode 100644
index 0000000..abc3eb4
--- /dev/null
+++ b/routers/web/repo/setting/deploy_key.go
@@ -0,0 +1,109 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// DeployKeys renders the deploy keys list page of a repository
+func DeployKeys(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys") + " / " + ctx.Tr("secrets.secrets")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+
+ keys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("ListDeployKeys", err)
+ return
+ }
+ ctx.Data["Deploykeys"] = keys
+
+ ctx.HTML(http.StatusOK, tplDeployKeys)
+}
+
+// DeployKeysPost handles adding a deploy key to a repository
+func DeployKeysPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AddKeyForm)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.deploy_keys")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+
+ keys, err := db.Find[asymkey_model.DeployKey](ctx, asymkey_model.ListDeployKeysOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("ListDeployKeys", err)
+ return
+ }
+ ctx.Data["Deploykeys"] = keys
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplDeployKeys)
+ return
+ }
+
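+ // Validate and normalize the submitted public key; specific failures are reported as flash messages.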
+ content, err := asymkey_model.CheckPublicKeyString(form.Content)
+ if err != nil {
+ if db.IsErrSSHDisabled(err) {
+ ctx.Flash.Info(ctx.Tr("settings.ssh_disabled"))
+ } else if asymkey_model.IsErrKeyUnableVerify(err) {
+ ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key"))
+ } else if err == asymkey_model.ErrKeyIsPrivate {
+ ctx.Data["HasError"] = true
+ ctx.Data["Err_Content"] = true
+ ctx.Flash.Error(ctx.Tr("form.must_use_public_key"))
+ } else {
+ ctx.Data["HasError"] = true
+ ctx.Data["Err_Content"] = true
+ ctx.Flash.Error(ctx.Tr("form.invalid_ssh_key", err.Error()))
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
+ return
+ }
+
+ key, err := asymkey_model.AddDeployKey(ctx, ctx.Repo.Repository.ID, form.Title, content, !form.IsWritable)
+ if err != nil {
+ ctx.Data["HasError"] = true
+ switch {
+ case asymkey_model.IsErrDeployKeyAlreadyExist(err):
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_been_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrKeyAlreadyExist(err):
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_key_been_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrKeyNameAlreadyUsed(err):
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_name_used"), tplDeployKeys, &form)
+ case asymkey_model.IsErrDeployKeyNameAlreadyUsed(err):
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.settings.key_name_used"), tplDeployKeys, &form)
+ default:
+ ctx.ServerError("AddDeployKey", err)
+ }
+ return
+ }
+
+ log.Trace("Deploy key added: %d", ctx.Repo.Repository.ID)
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_key_success", key.Name))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/keys")
+}
+
+// DeleteDeployKey deletes a deploy key from a repository
+func DeleteDeployKey(ctx *context.Context) {
+ if err := asymkey_service.DeleteDeployKey(ctx, ctx.Doer, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteDeployKey: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.deploy_key_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/keys")
+}
diff --git a/routers/web/repo/setting/git_hooks.go b/routers/web/repo/setting/git_hooks.go
new file mode 100644
index 0000000..217a01c
--- /dev/null
+++ b/routers/web/repo/setting/git_hooks.go
@@ -0,0 +1,65 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+)
+
+// GitHooks renders the Git hooks page of a repository
+func GitHooks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
+ ctx.Data["PageIsSettingsGitHooks"] = true
+
+ hooks, err := ctx.Repo.GitRepo.Hooks()
+ if err != nil {
+ ctx.ServerError("Hooks", err)
+ return
+ }
+ ctx.Data["Hooks"] = hooks
+
+ ctx.HTML(http.StatusOK, tplGithooks)
+}
+
+// GitHooksEdit renders the page for editing a Git hook of a repository
+func GitHooksEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.githooks")
+ ctx.Data["PageIsSettingsGitHooks"] = true
+
+ name := ctx.Params(":name")
+ hook, err := ctx.Repo.GitRepo.GetHook(name)
+ if err != nil {
+ if err == git.ErrNotValidHook {
+ ctx.NotFound("GetHook", err)
+ } else {
+ ctx.ServerError("GetHook", err)
+ }
+ return
+ }
+ ctx.Data["Hook"] = hook
+ ctx.HTML(http.StatusOK, tplGithookEdit)
+}
+
+// GitHooksEditPost handles editing a Git hook of a repository
+func GitHooksEditPost(ctx *context.Context) {
+ name := ctx.Params(":name")
+ hook, err := ctx.Repo.GitRepo.GetHook(name)
+ if err != nil {
+ if err == git.ErrNotValidHook {
+ ctx.NotFound("GetHook", err)
+ } else {
+ ctx.ServerError("GetHook", err)
+ }
+ return
+ }
+ hook.Content = ctx.FormString("content")
+ if err = hook.Update(); err != nil {
+ ctx.ServerError("hook.Update", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/hooks/git")
+}
diff --git a/routers/web/repo/setting/lfs.go b/routers/web/repo/setting/lfs.go
new file mode 100644
index 0000000..7e36343
--- /dev/null
+++ b/routers/web/repo/setting/lfs.go
@@ -0,0 +1,562 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "bytes"
+ "fmt"
+ gotemplate "html/template"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "strconv"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/pipeline"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsLFS base.TplName = "repo/settings/lfs"
+ tplSettingsLFSLocks base.TplName = "repo/settings/lfs_locks"
+ tplSettingsLFSFile base.TplName = "repo/settings/lfs_file"
+ tplSettingsLFSFileFind base.TplName = "repo/settings/lfs_file_find"
+ tplSettingsLFSPointers base.TplName = "repo/settings/lfs_pointers"
+)
+
+// LFSFiles shows a repository's LFS files
+func LFSFiles(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFiles", nil)
+ return
+ }
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ total, err := git_model.CountLFSMetaObjects(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("LFSFiles", err)
+ return
+ }
+ ctx.Data["Total"] = total
+
+ pager := context.NewPagination(int(total), setting.UI.ExplorePagingNum, page, 5)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.lfs")
+ ctx.Data["PageIsSettingsLFS"] = true
+ lfsMetaObjects, err := git_model.GetLFSMetaObjects(ctx, ctx.Repo.Repository.ID, pager.Paginater.Current(), setting.UI.ExplorePagingNum)
+ if err != nil {
+ ctx.ServerError("LFSFiles", err)
+ return
+ }
+ ctx.Data["LFSFiles"] = lfsMetaObjects
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFS)
+}
+
+// LFSLocks shows a repository's LFS locks
+func LFSLocks(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSLocks", nil)
+ return
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ total, err := git_model.CountLFSLockByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ ctx.Data["Total"] = total
+
+ pager := context.NewPagination(int(total), setting.UI.ExplorePagingNum, page, 5)
+ ctx.Data["Title"] = ctx.Tr("repo.settings.lfs_locks")
+ ctx.Data["PageIsSettingsLFS"] = true
+ lfsLocks, err := git_model.GetLFSLockByRepoID(ctx, ctx.Repo.Repository.ID, pager.Paginater.Current(), setting.UI.ExplorePagingNum)
+ if err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ if err := lfsLocks.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ ctx.Data["LFSLocks"] = lfsLocks
+
+ if len(lfsLocks) == 0 {
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFSLocks)
+ return
+ }
+
+ // Clone base repo.
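+ // The lock paths are checked against a temporary bare clone (with shared objects)
+ // so the live repository is never modified.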
+ tmpBasePath, err := repo_module.CreateTemporaryPath("locks")
+ if err != nil {
+ log.Error("Failed to create temporary path: %v", err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+ defer func() {
+ if err := repo_module.RemoveTemporaryPath(tmpBasePath); err != nil {
+ log.Error("LFSLocks: RemoveTemporaryPath: %v", err)
+ }
+ }()
+
+ if err := git.Clone(ctx, ctx.Repo.Repository.RepoPath(), tmpBasePath, git.CloneRepoOptions{
+ Bare: true,
+ Shared: true,
+ }); err != nil {
+ log.Error("Failed to clone repository: %s (%v)", ctx.Repo.Repository.FullName(), err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("failed to clone repository: %s (%w)", ctx.Repo.Repository.FullName(), err))
+ return
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, tmpBasePath)
+ if err != nil {
+ log.Error("Unable to open temporary repository: %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("failed to open new temporary repository in: %s %w", tmpBasePath, err))
+ return
+ }
+ defer gitRepo.Close()
+
+ filenames := make([]string, len(lfsLocks))
+
+ for i, lock := range lfsLocks {
+ filenames[i] = lock.Path
+ }
+
+ if err := gitRepo.ReadTreeToIndex(ctx.Repo.Repository.DefaultBranch); err != nil {
+ log.Error("Unable to read the default branch to the index: %s (%v)", ctx.Repo.Repository.DefaultBranch, err)
+ ctx.ServerError("LFSLocks", fmt.Errorf("unable to read the default branch to the index: %s (%w)", ctx.Repo.Repository.DefaultBranch, err))
+ return
+ }
+
+ ctx.Data["Lockables"], err = lockablesGitAttributes(gitRepo, lfsLocks)
+ if err != nil {
+ log.Error("Unable to get lockablesGitAttributes in %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ filelist, err := gitRepo.LsFiles(filenames...)
+ if err != nil {
+ log.Error("Unable to lsfiles in %s (%v)", tmpBasePath, err)
+ ctx.ServerError("LFSLocks", err)
+ return
+ }
+
+ fileset := make(container.Set[string], len(filelist))
+ fileset.AddMultiple(filelist...)
+
+ linkable := make([]bool, len(lfsLocks))
+ for i, lock := range lfsLocks {
+ linkable[i] = fileset.Contains(lock.Path)
+ }
+ ctx.Data["Linkable"] = linkable
+
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsLFSLocks)
+}
+
+func lockablesGitAttributes(gitRepo *git.Repository, lfsLocks []*git_model.LFSLock) ([]bool, error) {
+ checker, err := gitRepo.GitAttributeChecker("", "lockable")
+ if err != nil {
+ return nil, fmt.Errorf("could not GitAttributeChecker: %w", err)
+ }
+ defer checker.Close()
+
+ lockables := make([]bool, len(lfsLocks))
+ for i, lock := range lfsLocks {
+ attrs, err := checker.CheckPath(lock.Path)
+ if err != nil {
+ return nil, fmt.Errorf("could not CheckPath(%s): %w", lock.Path, err)
+ }
+ lockables[i] = attrs["lockable"].Bool().Value()
+ }
+ return lockables, nil
+}
+
+// LFSLockFile locks a file
+func LFSLockFile(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSLocks", nil)
+ return
+ }
+ originalPath := ctx.FormString("path")
+ lockPath := originalPath
+ if len(lockPath) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_locking_path", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ if lockPath[len(lockPath)-1] == '/' {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_lock_directory", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ lockPath = util.PathJoinRel(lockPath)
+ if len(lockPath) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_invalid_locking_path", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+
+ _, err := git_model.CreateLFSLock(ctx, ctx.Repo.Repository, &git_model.LFSLock{
+ Path: lockPath,
+ OwnerID: ctx.Doer.ID,
+ })
+ if err != nil {
+ if git_model.IsErrLFSLockAlreadyExist(err) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.lfs_lock_already_exists", originalPath))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+ return
+ }
+ ctx.ServerError("LFSLockFile", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+}
+
+// LFSUnlock forcibly unlocks an LFS lock
+func LFSUnlock(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSUnlock", nil)
+ return
+ }
+ _, err := git_model.DeleteLFSLockByID(ctx, ctx.ParamsInt64("lid"), ctx.Repo.Repository, ctx.Doer, true)
+ if err != nil {
+ ctx.ServerError("LFSUnlock", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs/locks")
+}
+
+// LFSFileGet serves a single LFS file
+func LFSFileGet(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+ oid := ctx.Params("oid")
+
+ p := lfs.Pointer{Oid: oid}
+ if !p.IsValid() {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+
+ ctx.Data["Title"] = oid
+ ctx.Data["PageIsSettingsLFS"] = true
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, oid)
+ if err != nil {
+ if err == git_model.ErrLFSObjectNotExist {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.ServerError("LFSFileGet", err)
+ return
+ }
+ ctx.Data["LFSFile"] = meta
+ dataRc, err := lfs.ReadMetaObject(meta.Pointer)
+ if err != nil {
+ ctx.ServerError("LFSFileGet", err)
+ return
+ }
+ defer dataRc.Close()
+ buf := make([]byte, 1024)
+ n, err := util.ReadAtMost(dataRc, buf)
+ if err != nil {
+ ctx.ServerError("Data", err)
+ return
+ }
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ ctx.Data["IsTextFile"] = st.IsText()
+ isRepresentableAsText := st.IsRepresentableAsText()
+
+ fileSize := meta.Size
+ ctx.Data["FileSize"] = meta.Size
+ ctx.Data["RawFileLink"] = fmt.Sprintf("%s%s/%s.git/info/lfs/objects/%s/%s", setting.AppURL, url.PathEscape(ctx.Repo.Repository.OwnerName), url.PathEscape(ctx.Repo.Repository.Name), url.PathEscape(meta.Oid), "direct")
+ switch {
+ case isRepresentableAsText:
+ if st.IsSvgImage() {
+ ctx.Data["IsImageFile"] = true
+ }
+
+ if fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+ // Build code view blocks with line numbers on the server side.
+ escapedContent := &bytes.Buffer{}
+ ctx.Data["EscapeStatus"], _ = charset.EscapeControlReader(rd, escapedContent, ctx.Locale, charset.FileviewContext)
+
+ var output bytes.Buffer
+ lines := strings.Split(escapedContent.String(), "\n")
+ // Remove blank line at the end of file
+ if len(lines) > 0 && lines[len(lines)-1] == "" {
+ lines = lines[:len(lines)-1]
+ }
+ for index, line := range lines {
+ line = gotemplate.HTMLEscapeString(line)
+ if index != len(lines)-1 {
+ line += "\n"
+ }
+ output.WriteString(fmt.Sprintf(`<li class="L%d" rel="L%d">%s</li>`, index+1, index+1, line))
+ }
+ ctx.Data["FileContent"] = gotemplate.HTML(output.String())
+
+ output.Reset()
+ for i := 0; i < len(lines); i++ {
+ output.WriteString(fmt.Sprintf(`<span id="L%d">%d</span>`, i+1, i+1))
+ }
+ ctx.Data["LineNums"] = gotemplate.HTML(output.String())
+
+ case st.IsPDF():
+ ctx.Data["IsPDFFile"] = true
+ case st.IsVideo():
+ ctx.Data["IsVideoFile"] = true
+ case st.IsAudio():
+ ctx.Data["IsAudioFile"] = true
+ case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()):
+ ctx.Data["IsImageFile"] = true
+ }
+ ctx.HTML(http.StatusOK, tplSettingsLFSFile)
+}
+
+// LFSDelete disassociates the provided oid from the repository and, if the LFS file is no longer associated with any repository, deletes it
+func LFSDelete(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSDelete", nil)
+ return
+ }
+ oid := ctx.Params("oid")
+ p := lfs.Pointer{Oid: oid}
+ if !p.IsValid() {
+ ctx.NotFound("LFSDelete", nil)
+ return
+ }
+
+ count, err := git_model.RemoveLFSMetaObjectByOid(ctx, ctx.Repo.Repository.ID, oid)
+ if err != nil {
+ ctx.ServerError("LFSDelete", err)
+ return
+ }
+ // FIXME: Warning: the LFS store is not locked - and can't be locked - there could be a race condition here
+ // Please note a similar condition happens in models/repo.go DeleteRepository
+ if count == 0 {
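+ // No repository references this LFS object any more, so remove it from the content store (path sharded as oid[0:2]/oid[2:4]/rest).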
+ oidPath := path.Join(oid[0:2], oid[2:4], oid[4:])
+ err = storage.LFS.Delete(oidPath)
+ if err != nil {
+ ctx.ServerError("LFSDelete", err)
+ return
+ }
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs")
+}
+
+// LFSFileFind guesses a sha for the provided oid (or uses the provided sha) and then finds the commits that contain this sha
+func LFSFileFind(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFind", nil)
+ return
+ }
+ oid := ctx.FormString("oid")
+ size := ctx.FormInt64("size")
+ if len(oid) == 0 || size == 0 {
+ ctx.NotFound("LFSFind", nil)
+ return
+ }
+ sha := ctx.FormString("sha")
+ ctx.Data["Title"] = oid
+ ctx.Data["PageIsSettingsLFS"] = true
+ objectFormat := ctx.Repo.GetObjectFormat()
+ var objectID git.ObjectID
+ if len(sha) == 0 {
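+ // No blob SHA provided: derive it by hashing the canonical LFS pointer file content.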
+ pointer := lfs.Pointer{Oid: oid, Size: size}
+ objectID = git.ComputeBlobHash(objectFormat, []byte(pointer.StringContent()))
+ sha = objectID.String()
+ } else {
+ objectID = git.MustIDFromString(sha)
+ }
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+ ctx.Data["Oid"] = oid
+ ctx.Data["Size"] = size
+ ctx.Data["SHA"] = sha
+
+ results, err := pipeline.FindLFSFile(ctx.Repo.GitRepo, objectID)
+ if err != nil && err != io.EOF {
+ log.Error("Failure in FindLFSFile: %v", err)
+ ctx.ServerError("LFSFind: FindLFSFile.", err)
+ return
+ }
+
+ ctx.Data["Results"] = results
+ ctx.HTML(http.StatusOK, tplSettingsLFSFileFind)
+}
+
+// LFSPointerFiles will search the repository for pointer files and report which are missing LFS files in the content store
+func LFSPointerFiles(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSFileGet", nil)
+ return
+ }
+ ctx.Data["PageIsSettingsLFS"] = true
+ ctx.Data["LFSFilesLink"] = ctx.Repo.RepoLink + "/settings/lfs"
+
+ var err error
+ err = func() error {
+ pointerChan := make(chan lfs.PointerBlob)
+ errChan := make(chan error, 1)
+ go lfs.SearchPointerBlobs(ctx, ctx.Repo.GitRepo, pointerChan, errChan)
+
+ numPointers := 0
+ var numAssociated, numNoExist, numAssociatable int
+
+ type pointerResult struct {
+ SHA string
+ Oid string
+ Size int64
+ InRepo bool
+ Exists bool
+ Accessible bool
+ Associatable bool
+ }
+
+ results := []pointerResult{}
+
+ contentStore := lfs.NewContentStore()
+ repo := ctx.Repo.Repository
+
+ for pointerBlob := range pointerChan {
+ numPointers++
+
+ result := pointerResult{
+ SHA: pointerBlob.Hash,
+ Oid: pointerBlob.Oid,
+ Size: pointerBlob.Size,
+ }
+
+ if _, err := git_model.GetLFSMetaObjectByOid(ctx, repo.ID, pointerBlob.Oid); err != nil {
+ if err != git_model.ErrLFSObjectNotExist {
+ return err
+ }
+ } else {
+ result.InRepo = true
+ }
+
+ result.Exists, err = contentStore.Exists(pointerBlob.Pointer)
+ if err != nil {
+ return err
+ }
+
+ if result.Exists {
+ if !result.InRepo {
+ // Can this be fixed automatically?
+ // We need to check whether the current user has access to a repo that has access to the file.
+ result.Associatable, err = git_model.LFSObjectAccessible(ctx, ctx.Doer, pointerBlob.Oid)
+ if err != nil {
+ return err
+ }
+ if !result.Associatable {
+ associated, err := git_model.ExistsLFSObject(ctx, pointerBlob.Oid)
+ if err != nil {
+ return err
+ }
+ result.Associatable = !associated
+ }
+ }
+ }
+
+ result.Accessible = result.InRepo || result.Associatable
+
+ if result.InRepo {
+ numAssociated++
+ }
+ if !result.Exists {
+ numNoExist++
+ }
+ if result.Associatable {
+ numAssociatable++
+ }
+
+ results = append(results, result)
+ }
+
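+ // The pointer channel is closed, so the loop has finished; surface any error reported by SearchPointerBlobs.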
+ err, has := <-errChan
+ if has {
+ return err
+ }
+
+ ctx.Data["Pointers"] = results
+ ctx.Data["NumPointers"] = numPointers
+ ctx.Data["NumAssociated"] = numAssociated
+ ctx.Data["NumAssociatable"] = numAssociatable
+ ctx.Data["NumNoExist"] = numNoExist
+ ctx.Data["NumNotAssociated"] = numPointers - numAssociated
+
+ return nil
+ }()
+ if err != nil {
+ ctx.ServerError("LFSPointerFiles", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsLFSPointers)
+}
+
+// LFSAutoAssociate automatically associates accessible LFS files
+func LFSAutoAssociate(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.NotFound("LFSAutoAssociate", nil)
+ return
+ }
+ oids := ctx.FormStrings("oid")
+ metas := make([]*git_model.LFSMetaObject, len(oids))
+ for i, oid := range oids {
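+ // Each submitted value has the form "<oid> <size>"; split on the first space.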
+ idx := strings.IndexRune(oid, ' ')
+ if idx < 0 || idx+1 > len(oid) {
+ ctx.ServerError("LFSAutoAssociate", fmt.Errorf("illegal oid input: %s", oid))
+ return
+ }
+ var err error
+ metas[i] = &git_model.LFSMetaObject{}
+ metas[i].Size, err = strconv.ParseInt(oid[idx+1:], 10, 64)
+ if err != nil {
+ ctx.ServerError("LFSAutoAssociate", fmt.Errorf("illegal oid input: %s %w", oid, err))
+ return
+ }
+ metas[i].Oid = oid[:idx]
+ // metas[i].RepositoryID = ctx.Repo.Repository.ID
+ }
+ if err := git_model.LFSAutoAssociate(ctx, metas, ctx.Doer, ctx.Repo.Repository.ID); err != nil {
+ ctx.ServerError("LFSAutoAssociate", err)
+ return
+ }
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/lfs")
+}
diff --git a/routers/web/repo/setting/main_test.go b/routers/web/repo/setting/main_test.go
new file mode 100644
index 0000000..c414b85
--- /dev/null
+++ b/routers/web/repo/setting/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/repo/setting/protected_branch.go b/routers/web/repo/setting/protected_branch.go
new file mode 100644
index 0000000..b2f5798
--- /dev/null
+++ b/routers/web/repo/setting/protected_branch.go
@@ -0,0 +1,347 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "time"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ pull_service "code.gitea.io/gitea/services/pull"
+ "code.gitea.io/gitea/services/repository"
+
+ "github.com/gobwas/glob"
+)
+
+const (
+ tplProtectedBranch base.TplName = "repo/settings/protected_branch"
+)
+
+// ProtectedBranchRules renders the page to protect the repository
+func ProtectedBranchRules(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.branches")
+ ctx.Data["PageIsSettingsBranches"] = true
+
+ rules, err := git_model.FindRepoProtectedBranchRules(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetProtectedBranches", err)
+ return
+ }
+ ctx.Data["ProtectedBranches"] = rules
+
+ repo.PrepareBranchList(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplBranches)
+}
+
+// SettingsProtectedBranch renders the protected branch setting page
+func SettingsProtectedBranch(c *context.Context) {
+ ruleName := c.FormString("rule_name")
+ var rule *git_model.ProtectedBranch
+ if ruleName != "" {
+ var err error
+ rule, err = git_model.GetProtectedBranchRuleByName(c, c.Repo.Repository.ID, ruleName)
+ if err != nil {
+ c.ServerError("GetProtectBranchOfRepoByName", err)
+ return
+ }
+ }
+
+ if rule == nil {
+ // No options found, create defaults.
+ rule = &git_model.ProtectedBranch{}
+ }
+
+ c.Data["PageIsSettingsBranches"] = true
+ c.Data["Title"] = c.Locale.TrString("repo.settings.protected_branch") + " - " + rule.RuleName
+
+ users, err := access_model.GetRepoReaders(c, c.Repo.Repository)
+ if err != nil {
+ c.ServerError("Repo.Repository.GetReaders", err)
+ return
+ }
+ c.Data["Users"] = users
+ c.Data["whitelist_users"] = strings.Join(base.Int64sToStrings(rule.WhitelistUserIDs), ",")
+ c.Data["merge_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistUserIDs), ",")
+ c.Data["approvals_whitelist_users"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistUserIDs), ",")
+ c.Data["status_check_contexts"] = strings.Join(rule.StatusCheckContexts, "\n")
+ contexts, _ := git_model.FindRepoRecentCommitStatusContexts(c, c.Repo.Repository.ID, 7*24*time.Hour) // Find last week status check contexts
+ c.Data["recent_status_checks"] = contexts
+
+ if c.Repo.Owner.IsOrganization() {
+ teams, err := organization.OrgFromUser(c.Repo.Owner).TeamsWithAccessToRepo(c, c.Repo.Repository.ID, perm.AccessModeRead)
+ if err != nil {
+ c.ServerError("Repo.Owner.TeamsWithAccessToRepo", err)
+ return
+ }
+ c.Data["Teams"] = teams
+ c.Data["whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.WhitelistTeamIDs), ",")
+ c.Data["merge_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.MergeWhitelistTeamIDs), ",")
+ c.Data["approvals_whitelist_teams"] = strings.Join(base.Int64sToStrings(rule.ApprovalsWhitelistTeamIDs), ",")
+ }
+
+ c.Data["Rule"] = rule
+ c.HTML(http.StatusOK, tplProtectedBranch)
+}
+
+// SettingsProtectedBranchPost updates the protected branch settings
+func SettingsProtectedBranchPost(ctx *context.Context) {
+ f := web.GetForm(ctx).(*forms.ProtectBranchForm)
+ var protectBranch *git_model.ProtectedBranch
+ if f.RuleName == "" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit", ctx.Repo.RepoLink))
+ return
+ }
+
+ var err error
+ if f.RuleID > 0 {
+ // If the RuleID isn't 0, this is an edit operation, so fetch the rule by ID.
+ protectBranch, err = git_model.GetProtectedBranchRuleByID(ctx, ctx.Repo.Repository.ID, f.RuleID)
+ if err != nil {
+ ctx.ServerError("GetProtectBranchOfRepoByID", err)
+ return
+ }
+ if protectBranch != nil && protectBranch.RuleName != f.RuleName {
+ // RuleName changed. We need to check if there is a rule with the same name.
+ // If a rule with the same name exists, an error should be returned.
+ sameNameProtectBranch, err := git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, f.RuleName)
+ if err != nil {
+ ctx.ServerError("GetProtectBranchOfRepoByName", err)
+ return
+ }
+ if sameNameProtectBranch != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_duplicate_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+ return
+ }
+ }
+ } else {
+ // Check if a rule already exists with this rule name; if so, redirect to it.
+ protectBranch, err = git_model.GetProtectedBranchRuleByName(ctx, ctx.Repo.Repository.ID, f.RuleName)
+ if err != nil {
+ ctx.ServerError("GetProtectedBranchRuleByName", err)
+ return
+ }
+ if protectBranch != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_duplicate_rule_name"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+ return
+ }
+ }
+ if protectBranch == nil {
+ // No options found, create defaults.
+ protectBranch = &git_model.ProtectedBranch{
+ RepoID: ctx.Repo.Repository.ID,
+ RuleName: f.RuleName,
+ }
+ }
+
+ var whitelistUsers, whitelistTeams, mergeWhitelistUsers, mergeWhitelistTeams, approvalsWhitelistUsers, approvalsWhitelistTeams []int64
+ protectBranch.RuleName = f.RuleName
+ if f.RequiredApprovals < 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protected_branch_required_approvals_min"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, f.RuleName))
+ return
+ }
+
+ switch f.EnablePush {
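+ // Map the selected push-access mode onto the CanPush/whitelist flags.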
+ case "all":
+ protectBranch.CanPush = true
+ protectBranch.EnableWhitelist = false
+ protectBranch.WhitelistDeployKeys = false
+ case "whitelist":
+ protectBranch.CanPush = true
+ protectBranch.EnableWhitelist = true
+ protectBranch.WhitelistDeployKeys = f.WhitelistDeployKeys
+ if strings.TrimSpace(f.WhitelistUsers) != "" {
+ whitelistUsers, _ = base.StringsToInt64s(strings.Split(f.WhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.WhitelistTeams) != "" {
+ whitelistTeams, _ = base.StringsToInt64s(strings.Split(f.WhitelistTeams, ","))
+ }
+ default:
+ protectBranch.CanPush = false
+ protectBranch.EnableWhitelist = false
+ protectBranch.WhitelistDeployKeys = false
+ }
+
+ protectBranch.EnableMergeWhitelist = f.EnableMergeWhitelist
+ if f.EnableMergeWhitelist {
+ if strings.TrimSpace(f.MergeWhitelistUsers) != "" {
+ mergeWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.MergeWhitelistTeams) != "" {
+ mergeWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.MergeWhitelistTeams, ","))
+ }
+ }
+
+ protectBranch.EnableStatusCheck = f.EnableStatusCheck
+ if f.EnableStatusCheck {
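+ // Normalize line endings and validate every non-empty pattern as a glob.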
+ patterns := strings.Split(strings.ReplaceAll(f.StatusCheckContexts, "\r", "\n"), "\n")
+ validPatterns := make([]string, 0, len(patterns))
+ for _, pattern := range patterns {
+ trimmed := strings.TrimSpace(pattern)
+ if trimmed == "" {
+ continue
+ }
+ if _, err := glob.Compile(trimmed); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.protect_invalid_status_check_pattern", pattern))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, url.QueryEscape(protectBranch.RuleName)))
+ return
+ }
+ validPatterns = append(validPatterns, trimmed)
+ }
+ if len(validPatterns) == 0 {
+ // if status checks are enabled, the patterns slice must not be empty
+ ctx.Flash.Error(ctx.Tr("repo.settings.protect_no_valid_status_check_patterns"))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches/edit?rule_name=%s", ctx.Repo.RepoLink, url.QueryEscape(protectBranch.RuleName)))
+ return
+ }
+ protectBranch.StatusCheckContexts = validPatterns
+ } else {
+ protectBranch.StatusCheckContexts = nil
+ }
+
+ protectBranch.RequiredApprovals = f.RequiredApprovals
+ protectBranch.EnableApprovalsWhitelist = f.EnableApprovalsWhitelist
+ if f.EnableApprovalsWhitelist {
+ if strings.TrimSpace(f.ApprovalsWhitelistUsers) != "" {
+ approvalsWhitelistUsers, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistUsers, ","))
+ }
+ if strings.TrimSpace(f.ApprovalsWhitelistTeams) != "" {
+ approvalsWhitelistTeams, _ = base.StringsToInt64s(strings.Split(f.ApprovalsWhitelistTeams, ","))
+ }
+ }
+ protectBranch.BlockOnRejectedReviews = f.BlockOnRejectedReviews
+ protectBranch.BlockOnOfficialReviewRequests = f.BlockOnOfficialReviewRequests
+ protectBranch.DismissStaleApprovals = f.DismissStaleApprovals
+ protectBranch.IgnoreStaleApprovals = f.IgnoreStaleApprovals
+ protectBranch.RequireSignedCommits = f.RequireSignedCommits
+ protectBranch.ProtectedFilePatterns = f.ProtectedFilePatterns
+ protectBranch.UnprotectedFilePatterns = f.UnprotectedFilePatterns
+ protectBranch.BlockOnOutdatedBranch = f.BlockOnOutdatedBranch
+ protectBranch.ApplyToAdmins = f.ApplyToAdmins
+
+ err = git_model.UpdateProtectBranch(ctx, ctx.Repo.Repository, protectBranch, git_model.WhitelistOptions{
+ UserIDs: whitelistUsers,
+ TeamIDs: whitelistTeams,
+ MergeUserIDs: mergeWhitelistUsers,
+ MergeTeamIDs: mergeWhitelistTeams,
+ ApprovalsUserIDs: approvalsWhitelistUsers,
+ ApprovalsTeamIDs: approvalsWhitelistTeams,
+ })
+ if err != nil {
+ ctx.ServerError("UpdateProtectBranch", err)
+ return
+ }
+
+ // FIXME: since we only need to recheck the file protection rules, this could be improved
+ matchedBranches, err := git_model.FindAllMatchedBranches(ctx, ctx.Repo.Repository.ID, protectBranch.RuleName)
+ if err != nil {
+ ctx.ServerError("FindAllMatchedBranches", err)
+ return
+ }
+ for _, branchName := range matchedBranches {
+ if err = pull_service.CheckPRsForBaseBranch(ctx, ctx.Repo.Repository, branchName); err != nil {
+ ctx.ServerError("CheckPRsForBaseBranch", err)
+ return
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_protect_branch_success", protectBranch.RuleName))
+ ctx.Redirect(fmt.Sprintf("%s/settings/branches?rule_name=%s", ctx.Repo.RepoLink, protectBranch.RuleName))
+}
+
+// DeleteProtectedBranchRulePost deletes a protected branch rule by ID
+func DeleteProtectedBranchRulePost(ctx *context.Context) {
+ ruleID := ctx.ParamsInt64("id")
+ if ruleID <= 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ rule, err := git_model.GetProtectedBranchRuleByID(ctx, ctx.Repo.Repository.ID, ruleID)
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if rule == nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", fmt.Sprintf("%d", ruleID)))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if err := git_model.DeleteProtectedBranch(ctx, ctx.Repo.Repository, ruleID); err != nil {
+ ctx.Flash.Error(ctx.Tr("repo.settings.remove_protected_branch_failed", rule.RuleName))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.remove_protected_branch_success", rule.RuleName))
+ ctx.JSONRedirect(fmt.Sprintf("%s/settings/branches", ctx.Repo.RepoLink))
+}
+
+// RenameBranchPost handles the request to rename a branch
+func RenameBranchPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RenameBranchForm)
+
+ if !ctx.Repo.CanCreateBranch() {
+ ctx.NotFound("RenameBranch", nil)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.GetErrMsg())
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ msg, err := repository.RenameBranch(ctx, ctx.Repo.Repository, ctx.Doer, ctx.Repo.GitRepo, form.From, form.To)
+ if err != nil {
+ if errors.Is(err, git_model.ErrBranchIsProtected) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_protected", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ } else if git_model.IsErrBranchAlreadyExists(err) {
+ ctx.Flash.Error(ctx.Tr("repo.branch.branch_already_exists", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ } else {
+ ctx.ServerError("RenameBranch", err)
+ }
+ return
+ }
+
+ if msg == "target_exist" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_exist", form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ if msg == "from_not_exist" {
+ ctx.Flash.Error(ctx.Tr("repo.settings.rename_branch_failed_not_exist", form.From))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.rename_branch_success", form.From, form.To))
+ ctx.Redirect(fmt.Sprintf("%s/branches", ctx.Repo.RepoLink))
+}
diff --git a/routers/web/repo/setting/protected_tag.go b/routers/web/repo/setting/protected_tag.go
new file mode 100644
index 0000000..2c25b65
--- /dev/null
+++ b/routers/web/repo/setting/protected_tag.go
@@ -0,0 +1,188 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/http"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplTags base.TplName = "repo/settings/tags"
+)
+
+// ProtectedTags renders the page to protect tags
+func ProtectedTags(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplTags)
+}
+
+// NewProtectedTagPost handles creation of a protected tag
+func NewProtectedTagPost(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTags)
+ return
+ }
+
+ repo := ctx.Repo.Repository
+ form := web.GetForm(ctx).(*forms.ProtectTagForm)
+
+ pt := &git_model.ProtectedTag{
+ RepoID: repo.ID,
+ NamePattern: strings.TrimSpace(form.NamePattern),
+ }
+
+ if strings.TrimSpace(form.AllowlistUsers) != "" {
+ pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ","))
+ }
+ if strings.TrimSpace(form.AllowlistTeams) != "" {
+ pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ","))
+ }
+
+ if err := git_model.InsertProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("InsertProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(setting.AppSubURL + ctx.Req.URL.EscapedPath())
+}
+
+// EditProtectedTag renders the page to edit a protected tag
+func EditProtectedTag(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.Data["PageIsEditProtectedTag"] = true
+
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ ctx.Data["name_pattern"] = pt.NamePattern
+ ctx.Data["allowlist_users"] = strings.Join(base.Int64sToStrings(pt.AllowlistUserIDs), ",")
+ ctx.Data["allowlist_teams"] = strings.Join(base.Int64sToStrings(pt.AllowlistTeamIDs), ",")
+
+ ctx.HTML(http.StatusOK, tplTags)
+}
+
+// EditProtectedTagPost handles editing of a protected tag
+func EditProtectedTagPost(ctx *context.Context) {
+ if setTagsContext(ctx) != nil {
+ return
+ }
+
+ ctx.Data["PageIsEditProtectedTag"] = true
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplTags)
+ return
+ }
+
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.ProtectTagForm)
+
+ pt.NamePattern = strings.TrimSpace(form.NamePattern)
+ pt.AllowlistUserIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistUsers, ","))
+ pt.AllowlistTeamIDs, _ = base.StringsToInt64s(strings.Split(form.AllowlistTeams, ","))
+
+ if err := git_model.UpdateProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("UpdateProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags")
+}
+
+// DeleteProtectedTagPost handles deletion of a protected tag
+func DeleteProtectedTagPost(ctx *context.Context) {
+ pt := selectProtectedTagByContext(ctx)
+ if pt == nil {
+ return
+ }
+
+ if err := git_model.DeleteProtectedTag(ctx, pt); err != nil {
+ ctx.ServerError("DeleteProtectedTag", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.Repository.Link() + "/settings/tags")
+}
+
+func setTagsContext(ctx *context.Context) error {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.tags")
+ ctx.Data["PageIsSettingsTags"] = true
+
+ protectedTags, err := git_model.GetProtectedTags(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ ctx.ServerError("GetProtectedTags", err)
+ return err
+ }
+ ctx.Data["ProtectedTags"] = protectedTags
+
+ users, err := access_model.GetRepoReaders(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("Repo.Repository.GetReaders", err)
+ return err
+ }
+ ctx.Data["Users"] = users
+
+ if ctx.Repo.Owner.IsOrganization() {
+ teams, err := organization.OrgFromUser(ctx.Repo.Owner).TeamsWithAccessToRepo(ctx, ctx.Repo.Repository.ID, perm.AccessModeRead)
+ if err != nil {
+ ctx.ServerError("Repo.Owner.TeamsWithAccessToRepo", err)
+ return err
+ }
+ ctx.Data["Teams"] = teams
+ }
+
+ return nil
+}
+
+func selectProtectedTagByContext(ctx *context.Context) *git_model.ProtectedTag {
+ id := ctx.FormInt64("id")
+ if id == 0 {
+ id = ctx.ParamsInt64(":id")
+ }
+
+ tag, err := git_model.GetProtectedTagByID(ctx, id)
+ if err != nil {
+ ctx.ServerError("GetProtectedTagByID", err)
+ return nil
+ }
+
+ if tag != nil && tag.RepoID == ctx.Repo.Repository.ID {
+ return tag
+ }
+
+ ctx.NotFound("", fmt.Errorf("ProtectedTag[%v] not associated to repository %v", id, ctx.Repo.Repository))
+
+ return nil
+}
diff --git a/routers/web/repo/setting/runners.go b/routers/web/repo/setting/runners.go
new file mode 100644
index 0000000..a47d3b4
--- /dev/null
+++ b/routers/web/repo/setting/runners.go
@@ -0,0 +1,187 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+ "net/url"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ actions_shared "code.gitea.io/gitea/routers/web/shared/actions"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // TODO: Separate secrets from runners when layout is ready
+ tplRepoRunners base.TplName = "repo/settings/actions"
+ tplOrgRunners base.TplName = "org/settings/actions"
+ tplAdminRunners base.TplName = "admin/actions"
+ tplUserRunners base.TplName = "user/settings/actions"
+ tplRepoRunnerEdit base.TplName = "repo/settings/runner_edit"
+ tplOrgRunnerEdit base.TplName = "org/settings/runners_edit"
+ tplAdminRunnerEdit base.TplName = "admin/runners/edit"
+ tplUserRunnerEdit base.TplName = "user/settings/runner_edit"
+)
+
+type runnersCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsAdmin bool
+ IsUser bool
+ RunnersTemplate base.TplName
+ RunnerEditTemplate base.TplName
+ RedirectLink string
+}
+
+func getRunnersCtx(ctx *context.Context) (*runnersCtx, error) {
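+ // Pick the settings scope (repo, org, admin or user) for this request along with the matching templates and redirect link.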
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &runnersCtx{
+ RepoID: ctx.Repo.Repository.ID,
+ OwnerID: 0,
+ IsRepo: true,
+ RunnersTemplate: tplRepoRunners,
+ RunnerEditTemplate: tplRepoRunnerEdit,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &runnersCtx{
+ RepoID: 0,
+ OwnerID: ctx.Org.Organization.ID,
+ IsOrg: true,
+ RunnersTemplate: tplOrgRunners,
+ RunnerEditTemplate: tplOrgRunnerEdit,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &runnersCtx{
+ RepoID: 0,
+ OwnerID: 0,
+ IsAdmin: true,
+ RunnersTemplate: tplAdminRunners,
+ RunnerEditTemplate: tplAdminRunnerEdit,
+ RedirectLink: setting.AppSubURL + "/admin/actions/runners/",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &runnersCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ RunnersTemplate: tplUserRunners,
+ RunnerEditTemplate: tplUserRunnerEdit,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/runners/",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Runners context")
+}
+
+// Runners renders the settings/actions/runners page for the current settings level (repo, org, admin or user)
+func Runners(ctx *context.Context) {
+ ctx.Data["PageIsSharedSettingsRunners"] = true
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageType"] = "runners"
+
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ opts := actions_model.FindRunnerOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: 100,
+ },
+ Sort: ctx.Req.URL.Query().Get("sort"),
+ Filter: ctx.Req.URL.Query().Get("q"),
+ }
+ if rCtx.IsRepo {
+ opts.RepoID = rCtx.RepoID
+ opts.WithAvailable = true
+ } else if rCtx.IsOrg || rCtx.IsUser {
+ opts.OwnerID = rCtx.OwnerID
+ opts.WithAvailable = true
+ }
+ actions_shared.RunnersList(ctx, opts)
+
+ ctx.HTML(http.StatusOK, rCtx.RunnersTemplate)
+}
+
+// RunnersEdit renders the runner edit page for the current settings level
+func RunnersEdit(ctx *context.Context) {
+ ctx.Data["PageIsSharedSettingsRunners"] = true
+ ctx.Data["Title"] = ctx.Tr("actions.runners.edit_runner")
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ actions_shared.RunnerDetails(ctx, page,
+ ctx.ParamsInt64(":runnerid"), rCtx.OwnerID, rCtx.RepoID,
+ )
+ ctx.HTML(http.StatusOK, rCtx.RunnerEditTemplate)
+}
+
+func RunnersEditPost(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerDetailsEditPost(ctx, ctx.ParamsInt64(":runnerid"),
+ rCtx.OwnerID, rCtx.RepoID,
+ rCtx.RedirectLink+url.PathEscape(ctx.Params(":runnerid")))
+}
+
+func ResetRunnerRegistrationToken(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerResetRegistrationToken(ctx, rCtx.OwnerID, rCtx.RepoID, rCtx.RedirectLink)
+}
+
+// RunnerDeletePost handles deletion of a runner
+func RunnerDeletePost(ctx *context.Context) {
+ rCtx, err := getRunnersCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getRunnersCtx", err)
+ return
+ }
+ actions_shared.RunnerDeletePost(ctx, ctx.ParamsInt64(":runnerid"), rCtx.RedirectLink, rCtx.RedirectLink+url.PathEscape(ctx.Params(":runnerid")))
+}
+
+func RedirectToDefaultSetting(ctx *context.Context) {
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/actions/runners")
+}
diff --git a/routers/web/repo/setting/secrets.go b/routers/web/repo/setting/secrets.go
new file mode 100644
index 0000000..d4d56bf
--- /dev/null
+++ b/routers/web/repo/setting/secrets.go
@@ -0,0 +1,127 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared "code.gitea.io/gitea/routers/web/shared/secrets"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ // TODO: Separate secrets from runners when layout is ready
+ tplRepoSecrets base.TplName = "repo/settings/actions"
+ tplOrgSecrets base.TplName = "org/settings/actions"
+ tplUserSecrets base.TplName = "user/settings/actions"
+)
+
+type secretsCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsUser bool
+ SecretsTemplate base.TplName
+ RedirectLink string
+}
+
+func getSecretsCtx(ctx *context.Context) (*secretsCtx, error) {
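+ // Pick the settings scope (repo, org or user) for this request along with the matching template and redirect link.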
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &secretsCtx{
+ OwnerID: 0,
+ RepoID: ctx.Repo.Repository.ID,
+ IsRepo: true,
+ SecretsTemplate: tplRepoSecrets,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/secrets",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &secretsCtx{
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: 0,
+ IsOrg: true,
+ SecretsTemplate: tplOrgSecrets,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/secrets",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &secretsCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ SecretsTemplate: tplUserSecrets,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/secrets",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Secrets context")
+}
+
+func Secrets(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.actions")
+ ctx.Data["PageType"] = "secrets"
+ ctx.Data["PageIsSharedSettingsSecrets"] = true
+
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+
+ if sCtx.IsRepo {
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+ }
+
+ shared.SetSecretsContext(ctx, sCtx.OwnerID, sCtx.RepoID)
+ if ctx.Written() {
+ return
+ }
+ ctx.HTML(http.StatusOK, sCtx.SecretsTemplate)
+}
+
+func SecretsPost(ctx *context.Context) {
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.PerformSecretsPost(
+ ctx,
+ sCtx.OwnerID,
+ sCtx.RepoID,
+ sCtx.RedirectLink,
+ )
+}
+
+func SecretsDelete(ctx *context.Context) {
+ sCtx, err := getSecretsCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getSecretsCtx", err)
+ return
+ }
+ shared.PerformSecretsDelete(
+ ctx,
+ sCtx.OwnerID,
+ sCtx.RepoID,
+ sCtx.RedirectLink,
+ )
+}
diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go
new file mode 100644
index 0000000..aee2e2f
--- /dev/null
+++ b/routers/web/repo/setting/setting.go
@@ -0,0 +1,1115 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ quota_model "code.gitea.io/gitea/models/quota"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/indexer/stats"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/validation"
+ "code.gitea.io/gitea/modules/web"
+ actions_service "code.gitea.io/gitea/services/actions"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/federation"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/migrations"
+ mirror_service "code.gitea.io/gitea/services/mirror"
+ repo_service "code.gitea.io/gitea/services/repository"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+)
+
+const (
+ tplSettingsOptions base.TplName = "repo/settings/options"
+ tplSettingsUnits base.TplName = "repo/settings/units"
+ tplCollaboration base.TplName = "repo/settings/collaboration"
+ tplBranches base.TplName = "repo/settings/branches"
+ tplGithooks base.TplName = "repo/settings/githooks"
+ tplGithookEdit base.TplName = "repo/settings/githook_edit"
+ tplDeployKeys base.TplName = "repo/settings/deploy_keys"
+)
+
+// SettingsCtxData is a middleware that sets all the general context data for the
+// settings template.
+func SettingsCtxData(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.options")
+ ctx.Data["PageIsSettingsOptions"] = true
+ ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+ ctx.Data["DisableNewPushMirrors"] = setting.Mirror.DisableNewPush
+ ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
+ ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
+
+ signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath())
+ ctx.Data["SigningKeyAvailable"] = len(signing) > 0
+ ctx.Data["SigningSettings"] = setting.Repository.Signing
+ ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ if ctx.Doer.IsAdmin {
+ if setting.Indexer.RepoIndexerEnabled {
+ status, err := repo_model.GetIndexerStatus(ctx, ctx.Repo.Repository, repo_model.RepoIndexerTypeCode)
+ if err != nil {
+ ctx.ServerError("repo.indexer_status", err)
+ return
+ }
+ ctx.Data["CodeIndexerStatus"] = status
+ }
+ status, err := repo_model.GetIndexerStatus(ctx, ctx.Repo.Repository, repo_model.RepoIndexerTypeStats)
+ if err != nil {
+ ctx.ServerError("repo.indexer_status", err)
+ return
+ }
+ ctx.Data["StatsIndexerStatus"] = status
+ }
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, ctx.Repo.Repository.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetPushMirrorsByRepoID", err)
+ return
+ }
+ ctx.Data["PushMirrors"] = pushMirrors
+ ctx.Data["CanUseSSHMirroring"] = git.HasSSHExecutable
+}
+
+// Units shows a repository's unit settings page
+func Units(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.units.units")
+ ctx.Data["PageIsRepoSettingsUnits"] = true
+
+ ctx.HTML(http.StatusOK, tplSettingsUnits)
+}
+
+func UnitsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RepoUnitSettingForm)
+
+ repo := ctx.Repo.Repository
+
+ var repoChanged bool
+ var units []repo_model.RepoUnit
+ var deleteUnitTypes []unit_model.Type
+
+ // This section doesn't require repo_name/RepoName to be set in the form; don't show it
+ // as an error in the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
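+ // For each unit type below, either enable it with its config or mark it for removal, skipping unit types that are globally disabled.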
+ if repo.CloseIssuesViaCommitInAnyBranch != form.EnableCloseIssuesViaCommitInAnyBranch {
+ repo.CloseIssuesViaCommitInAnyBranch = form.EnableCloseIssuesViaCommitInAnyBranch
+ repoChanged = true
+ }
+
+ if form.EnableCode && !unit_model.TypeCode.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeCode,
+ })
+ } else if !unit_model.TypeCode.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeCode)
+ }
+
+ if form.EnableWiki && form.EnableExternalWiki && !unit_model.TypeExternalWiki.UnitGlobalDisabled() {
+ if !validation.IsValidExternalURL(form.ExternalWikiURL) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.external_wiki_url_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeExternalWiki,
+ Config: &repo_model.ExternalWikiConfig{
+ ExternalWikiURL: form.ExternalWikiURL,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeWiki)
+ } else if form.EnableWiki && !form.EnableExternalWiki && !unit_model.TypeWiki.UnitGlobalDisabled() {
+ var wikiPermissions repo_model.UnitAccessMode
+ if form.GloballyWriteableWiki {
+ wikiPermissions = repo_model.UnitAccessModeWrite
+ } else {
+ wikiPermissions = repo_model.UnitAccessModeRead
+ }
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeWiki,
+ Config: new(repo_model.UnitConfig),
+ DefaultPermissions: wikiPermissions,
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalWiki)
+ } else {
+ if !unit_model.TypeExternalWiki.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalWiki)
+ }
+ if !unit_model.TypeWiki.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeWiki)
+ }
+ }
+
+ if form.EnableIssues && form.EnableExternalTracker && !unit_model.TypeExternalTracker.UnitGlobalDisabled() {
+ if !validation.IsValidExternalURL(form.ExternalTrackerURL) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.external_tracker_url_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+ if len(form.TrackerURLFormat) != 0 && !validation.IsValidExternalTrackerURLFormat(form.TrackerURLFormat) {
+ ctx.Flash.Error(ctx.Tr("repo.settings.tracker_url_format_error"))
+ ctx.Redirect(repo.Link() + "/settings/units")
+ return
+ }
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeExternalTracker,
+ Config: &repo_model.ExternalTrackerConfig{
+ ExternalTrackerURL: form.ExternalTrackerURL,
+ ExternalTrackerFormat: form.TrackerURLFormat,
+ ExternalTrackerStyle: form.TrackerIssueStyle,
+ ExternalTrackerRegexpPattern: form.ExternalTrackerRegexpPattern,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeIssues)
+ } else if form.EnableIssues && !form.EnableExternalTracker && !unit_model.TypeIssues.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeIssues,
+ Config: &repo_model.IssuesConfig{
+ EnableTimetracker: form.EnableTimetracker,
+ AllowOnlyContributorsToTrackTime: form.AllowOnlyContributorsToTrackTime,
+ EnableDependencies: form.EnableIssueDependencies,
+ },
+ })
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalTracker)
+ } else {
+ if !unit_model.TypeExternalTracker.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeExternalTracker)
+ }
+ if !unit_model.TypeIssues.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeIssues)
+ }
+ }
+
+ if form.EnableProjects && !unit_model.TypeProjects.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeProjects,
+ })
+ } else if !unit_model.TypeProjects.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeProjects)
+ }
+
+ if form.EnableReleases && !unit_model.TypeReleases.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeReleases,
+ })
+ } else if !unit_model.TypeReleases.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeReleases)
+ }
+
+ if form.EnablePackages && !unit_model.TypePackages.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypePackages,
+ })
+ } else if !unit_model.TypePackages.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePackages)
+ }
+
+ if form.EnableActions && !unit_model.TypeActions.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypeActions,
+ })
+ } else if !unit_model.TypeActions.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeActions)
+ }
+
+ if form.EnablePulls && !unit_model.TypePullRequests.UnitGlobalDisabled() {
+ units = append(units, repo_model.RepoUnit{
+ RepoID: repo.ID,
+ Type: unit_model.TypePullRequests,
+ Config: &repo_model.PullRequestsConfig{
+ IgnoreWhitespaceConflicts: form.PullsIgnoreWhitespace,
+ AllowMerge: form.PullsAllowMerge,
+ AllowRebase: form.PullsAllowRebase,
+ AllowRebaseMerge: form.PullsAllowRebaseMerge,
+ AllowSquash: form.PullsAllowSquash,
+ AllowFastForwardOnly: form.PullsAllowFastForwardOnly,
+ AllowManualMerge: form.PullsAllowManualMerge,
+ AutodetectManualMerge: form.EnableAutodetectManualMerge,
+ AllowRebaseUpdate: form.PullsAllowRebaseUpdate,
+ DefaultDeleteBranchAfterMerge: form.DefaultDeleteBranchAfterMerge,
+ DefaultMergeStyle: repo_model.MergeStyle(form.PullsDefaultMergeStyle),
+ DefaultAllowMaintainerEdit: form.DefaultAllowMaintainerEdit,
+ },
+ })
+ } else if !unit_model.TypePullRequests.UnitGlobalDisabled() {
+ deleteUnitTypes = append(deleteUnitTypes, unit_model.TypePullRequests)
+ }
+
+ if len(units) == 0 {
+ ctx.Flash.Error(ctx.Tr("repo.settings.update_settings_no_unit"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/units")
+ return
+ }
+
+ if err := repo_service.UpdateRepositoryUnits(ctx, repo, units, deleteUnitTypes); err != nil {
+ ctx.ServerError("UpdateRepositoryUnits", err)
+ return
+ }
+ if repoChanged {
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ }
+ log.Trace("Repository advanced settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings/units")
+}
+
+// Settings shows a repository's settings page
+func Settings(ctx *context.Context) {
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+}
+
+// SettingsPost handles changes to a repository's settings
+func SettingsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.RepoSettingForm)
+
+ ctx.Data["ForcePrivate"] = setting.Repository.ForcePrivate
+ ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
+ ctx.Data["DisableNewPullMirrors"] = setting.Mirror.DisableNewPull
+ ctx.Data["DisableNewPushMirrors"] = setting.Mirror.DisableNewPush
+ ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
+ ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
+
+ signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath())
+ ctx.Data["SigningKeyAvailable"] = len(signing) > 0
+ ctx.Data["SigningSettings"] = setting.Repository.Signing
+ ctx.Data["CodeIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+
+ repo := ctx.Repo.Repository
+
+ switch ctx.FormString("action") {
+ case "update":
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSettingsOptions)
+ return
+ }
+
+ newRepoName := form.RepoName
+ // Check if repository name has been changed.
+ if repo.LowerName != strings.ToLower(newRepoName) {
+ // Close the GitRepo if open
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+ if err := repo_service.ChangeRepositoryName(ctx, ctx.Doer, repo, newRepoName); err != nil {
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case repo_model.IsErrRepoAlreadyExist(err):
+ ctx.RenderWithErr(ctx.Tr("form.repo_name_been_taken"), tplSettingsOptions, &form)
+ case db.IsErrNameReserved(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_reserved", err.(db.ErrNameReserved).Name), tplSettingsOptions, &form)
+ case repo_model.IsErrRepoFilesAlreadyExist(err):
+ ctx.Data["Err_RepoName"] = true
+ switch {
+ case ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories):
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt_or_delete"), tplSettingsOptions, form)
+ case setting.Repository.AllowAdoptionOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.adopt"), tplSettingsOptions, form)
+ case setting.Repository.AllowDeleteOfUnadoptedRepositories:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist.delete"), tplSettingsOptions, form)
+ default:
+ ctx.RenderWithErr(ctx.Tr("form.repository_files_already_exist"), tplSettingsOptions, form)
+ }
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.RenderWithErr(ctx.Tr("repo.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplSettingsOptions, &form)
+ default:
+ ctx.ServerError("ChangeRepositoryName", err)
+ }
+ return
+ }
+
+ log.Trace("Repository name changed: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newRepoName)
+ }
+ // In case it's just a case change.
+ repo.Name = newRepoName
+ repo.LowerName = strings.ToLower(newRepoName)
+ repo.Description = form.Description
+ repo.Website = form.Website
+ repo.IsTemplate = form.Template
+
+ // The visibility of a forked repository is forced to stay in sync with its base repository.
+ if repo.IsFork {
+ form.Private = repo.BaseRepo.IsPrivate || repo.BaseRepo.Owner.Visibility == structs.VisibleTypePrivate
+ }
+
+ visibilityChanged := repo.IsPrivate != form.Private
+ // When ForcePrivate is enabled, anyone can change a public repo to private, but only admin users can change a private repo back to public.
+ if visibilityChanged && setting.Repository.ForcePrivate && !form.Private && !ctx.Doer.IsAdmin {
+ ctx.RenderWithErr(ctx.Tr("form.repository_force_private"), tplSettingsOptions, form)
+ return
+ }
+
+ repo.IsPrivate = form.Private
+ if err := repo_service.UpdateRepository(ctx, repo, visibilityChanged); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ log.Trace("Repository basic settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "federation":
+ if !setting.Federation.Enabled {
+ ctx.NotFound("", nil)
+ ctx.Flash.Info(ctx.Tr("repo.settings.federation_not_enabled"))
+ return
+ }
+ followingRepos := strings.TrimSpace(form.FollowingRepos)
+ followingRepos = strings.TrimSuffix(followingRepos, ";")
+
+ maxFollowingRepoStrLength := 2048
+ errs := validation.ValidateMaxLen(followingRepos, maxFollowingRepoStrLength, "federationRepos")
+ if len(errs) > 0 {
+ ctx.Data["ERR_FollowingRepos"] = true
+ ctx.Flash.Error(ctx.Tr("repo.form.string_too_long", maxFollowingRepoStrLength))
+ ctx.Redirect(repo.Link() + "/settings")
+ return
+ }
+
+ federationRepoSplit := []string{}
+ if followingRepos != "" {
+ federationRepoSplit = strings.Split(followingRepos, ";")
+ }
+ for idx, repo := range federationRepoSplit {
+ federationRepoSplit[idx] = strings.TrimSpace(repo)
+ }
+
+ if _, _, err := federation.StoreFollowingRepoList(ctx, ctx.Repo.Repository.ID, federationRepoSplit); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "mirror":
+ if !setting.Mirror.Enabled || !repo.IsMirror || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ pullMirror, err := repo_model.GetMirrorByRepoID(ctx, ctx.Repo.Repository.ID)
+ if err == repo_model.ErrMirrorNotExist {
+ ctx.NotFound("", nil)
+ return
+ }
+ if err != nil {
+ ctx.ServerError("GetMirrorByRepoID", err)
+ return
+ }
+ // This section doesn't require repo_name/RepoName to be set in the form; don't show it
+ // as an error in the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.Interval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.Data["Err_Interval"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
+ return
+ }
+
+ pullMirror.EnablePrune = form.EnablePrune
+ pullMirror.Interval = interval
+ pullMirror.ScheduleNextUpdate()
+ if err := repo_model.UpdateMirror(ctx, pullMirror); err != nil {
+ ctx.ServerError("UpdateMirror", err)
+ return
+ }
+
+ u, err := git.GetRemoteURL(ctx, ctx.Repo.Repository.RepoPath(), pullMirror.GetRemoteName())
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
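+ // Reuse the stored remote password when the password field was left empty and the username is unchanged.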
+ if u.User != nil && form.MirrorPassword == "" && form.MirrorUsername == u.User.Username() {
+ form.MirrorPassword, _ = u.User.Password()
+ }
+
+ address, err := forms.ParseRemoteAddr(form.MirrorAddress, form.MirrorUsername, form.MirrorPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(address, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ if err := mirror_service.UpdateAddress(ctx, pullMirror, address); err != nil {
+ ctx.ServerError("UpdateAddress", err)
+ return
+ }
+ remoteAddress, err := util.SanitizeURL(address)
+ if err != nil {
+ ctx.Data["Err_MirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+ pullMirror.RemoteAddress = remoteAddress
+
+ form.LFS = form.LFS && setting.LFS.StartServer
+
+ if len(form.LFSEndpoint) > 0 {
+ ep := lfs.DetermineEndpoint("", form.LFSEndpoint)
+ if ep == nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_lfs_endpoint"), tplSettingsOptions, &form)
+ return
+ }
+ err = migrations.IsMigrateURLAllowed(ep.String(), ctx.Doer)
+ if err != nil {
+ ctx.Data["Err_LFSEndpoint"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+ }
+
+ pullMirror.LFS = form.LFS
+ pullMirror.LFSEndpoint = form.LFSEndpoint
+ if err := repo_model.UpdateMirror(ctx, pullMirror); err != nil {
+ ctx.ServerError("UpdateMirror", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "mirror-sync":
+ if !setting.Mirror.Enabled || !repo.IsMirror || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ ok, err := quota_model.EvaluateForUser(ctx, repo.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
+ if !ok {
+ // This section doesn't require repo_name/RepoName to be set in the form; don't show it
+ // as an error in the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ ctx.RenderWithErr(ctx.Tr("repo.settings.pull_mirror_sync_quota_exceeded"), tplSettingsOptions, &form)
+ return
+ }
+
+ mirror_service.AddPullMirrorToQueue(repo.ID)
+
+ ctx.Flash.Info(ctx.Tr("repo.settings.pull_mirror_sync_in_progress", repo.OriginalURL))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-sync":
+ if !setting.Mirror.Enabled {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ m, err := selectPushMirrorByForm(ctx, form, repo)
+ if err != nil {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ mirror_service.AddPushMirrorToQueue(m.ID)
+
+ ctx.Flash.Info(ctx.Tr("repo.settings.push_mirror_sync_in_progress", m.RemoteAddress))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-update":
+ if !setting.Mirror.Enabled || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form; don't show it
+ // as an error in the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.PushMirrorInterval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &forms.RepoSettingForm{})
+ return
+ }
+
+ id, err := strconv.ParseInt(form.PushMirrorID, 10, 64)
+ if err != nil {
+ ctx.ServerError("UpdatePushMirrorIntervalPushMirrorID", err)
+ return
+ }
+ m := &repo_model.PushMirror{
+ ID: id,
+ Interval: interval,
+ }
+ if err := repo_model.UpdatePushMirrorInterval(ctx, m); err != nil {
+ ctx.ServerError("UpdatePushMirrorInterval", err)
+ return
+ }
+ // Background on why the mirror is added to the queue:
+ // as in `push-mirror-sync`, pushing to the queue is what triggers the update,
+ // so after changing the interval the mirror is requeued accordingly.
+ mirror_service.AddPushMirrorToQueue(m.ID)
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-remove":
+ if !setting.Mirror.Enabled || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form; don't show it
+ // as an error in the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ m, err := selectPushMirrorByForm(ctx, form, repo)
+ if err != nil {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ if err = mirror_service.RemovePushMirrorRemote(ctx, m); err != nil {
+ ctx.ServerError("RemovePushMirrorRemote", err)
+ return
+ }
+
+ if err = repo_model.DeletePushMirrors(ctx, repo_model.PushMirrorOptions{ID: m.ID, RepoID: m.RepoID}); err != nil {
+ ctx.ServerError("DeletePushMirrorByID", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "push-mirror-add":
+ if setting.Mirror.DisableNewPush || repo.IsArchived {
+ ctx.NotFound("", nil)
+ return
+ }
+
+ // This section doesn't require repo_name/RepoName to be set in the form, don't show it
+ // as an error on the UI for this action
+ ctx.Data["Err_RepoName"] = nil
+
+ interval, err := time.ParseDuration(form.PushMirrorInterval)
+ if err != nil || (interval != 0 && interval < setting.Mirror.MinInterval) {
+ ctx.Data["Err_PushMirrorInterval"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_interval_invalid"), tplSettingsOptions, &form)
+ return
+ }
+
+ if form.PushMirrorUseSSH && (form.PushMirrorUsername != "" || form.PushMirrorPassword != "") {
+ ctx.Data["Err_PushMirrorUseSSH"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_denied_combination"), tplSettingsOptions, &form)
+ return
+ }
+
+ if form.PushMirrorUseSSH && !git.HasSSHExecutable {
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_use_ssh.not_available"), tplSettingsOptions, &form)
+ return
+ }
+
+ address, err := forms.ParseRemoteAddr(form.PushMirrorAddress, form.PushMirrorUsername, form.PushMirrorPassword)
+ if err == nil {
+ err = migrations.IsMigrateURLAllowed(address, ctx.Doer)
+ }
+ if err != nil {
+ ctx.Data["Err_PushMirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ remoteSuffix, err := util.CryptoRandomString(10)
+ if err != nil {
+ ctx.ServerError("RandomString", err)
+ return
+ }
+
+ remoteAddress, err := util.SanitizeURL(address)
+ if err != nil {
+ ctx.Data["Err_PushMirrorAddress"] = true
+ handleSettingRemoteAddrError(ctx, err, form)
+ return
+ }
+
+ m := &repo_model.PushMirror{
+ RepoID: repo.ID,
+ Repo: repo,
+ RemoteName: fmt.Sprintf("remote_mirror_%s", remoteSuffix),
+ SyncOnCommit: form.PushMirrorSyncOnCommit,
+ Interval: interval,
+ RemoteAddress: remoteAddress,
+ }
+
+ var plainPrivateKey []byte
+ if form.PushMirrorUseSSH {
+ publicKey, privateKey, err := util.GenerateSSHKeypair()
+ if err != nil {
+ ctx.ServerError("GenerateSSHKeypair", err)
+ return
+ }
+ plainPrivateKey = privateKey
+ m.PublicKey = string(publicKey)
+ }
+
+ if err := db.Insert(ctx, m); err != nil {
+ ctx.ServerError("InsertPushMirror", err)
+ return
+ }
+
+ if form.PushMirrorUseSSH {
+ if err := m.SetPrivatekey(ctx, plainPrivateKey); err != nil {
+ ctx.ServerError("SetPrivatekey", err)
+ return
+ }
+ }
+
+ if err := mirror_service.AddPushMirrorRemote(ctx, m, address); err != nil {
+ if err := repo_model.DeletePushMirrors(ctx, repo_model.PushMirrorOptions{ID: m.ID, RepoID: m.RepoID}); err != nil {
+ log.Error("DeletePushMirrors %v", err)
+ }
+ ctx.ServerError("AddPushMirrorRemote", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "signing":
+ changed := false
+ trustModel := repo_model.ToTrustModel(form.TrustModel)
+ if trustModel != repo.TrustModel {
+ repo.TrustModel = trustModel
+ changed = true
+ }
+
+ if changed {
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+ }
+ log.Trace("Repository signing settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "admin":
+ if !ctx.Doer.IsAdmin {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if repo.IsFsckEnabled != form.EnableHealthCheck {
+ repo.IsFsckEnabled = form.EnableHealthCheck
+ }
+
+ if err := repo_service.UpdateRepository(ctx, repo, false); err != nil {
+ ctx.ServerError("UpdateRepository", err)
+ return
+ }
+
+ log.Trace("Repository admin settings updated: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_settings_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "admin_index":
+ if !ctx.Doer.IsAdmin {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ switch form.RequestReindexType {
+ case "stats":
+ if err := stats.UpdateRepoIndexer(ctx.Repo.Repository); err != nil {
+ ctx.ServerError("UpdateStatsRepondexer", err)
+ return
+ }
+ case "code":
+ if !setting.Indexer.RepoIndexerEnabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ code.UpdateRepoIndexer(ctx.Repo.Repository)
+ default:
+ ctx.NotFound("", nil)
+ return
+ }
+
+ log.Trace("Repository reindex for %s requested: %s/%s", form.RequestReindexType, ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.reindex_requested"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "convert":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if !repo.IsMirror {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ repo.IsMirror = false
+
+ if _, err := repo_service.CleanUpMigrateInfo(ctx, repo); err != nil {
+ ctx.ServerError("CleanUpMigrateInfo", err)
+ return
+ } else if err = repo_model.DeleteMirrorByRepoID(ctx, ctx.Repo.Repository.ID); err != nil {
+ ctx.ServerError("DeleteMirrorByRepoID", err)
+ return
+ }
+ log.Trace("Repository converted from mirror to regular: %s", repo.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.convert_succeed"))
+ ctx.Redirect(repo.Link())
+
+ case "convert_fork":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if err := repo.LoadOwner(ctx); err != nil {
+ ctx.ServerError("Convert Fork", err)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if !repo.IsFork {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ if !ctx.Repo.Owner.CanCreateRepo() {
+ maxCreationLimit := ctx.Repo.Owner.MaxCreationLimit()
+ msg := ctx.TrN(maxCreationLimit, "repo.form.reach_limit_of_creation_1", "repo.form.reach_limit_of_creation_n", maxCreationLimit)
+ ctx.Flash.Error(msg)
+ ctx.Redirect(repo.Link() + "/settings")
+ return
+ }
+
+ if err := repo_service.ConvertForkToNormalRepository(ctx, repo); err != nil {
+ log.Error("Unable to convert repository %-v from fork. Error: %v", repo, err)
+ ctx.ServerError("Convert Fork", err)
+ return
+ }
+
+ log.Trace("Repository converted from fork to regular: %s", repo.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.convert_fork_succeed"))
+ ctx.Redirect(repo.Link())
+
+ case "transfer":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ newOwner, err := user_model.GetUserByName(ctx, ctx.FormString("new_owner_name"))
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
+ return
+ }
+ ctx.ServerError("IsUserExist", err)
+ return
+ }
+
+ if newOwner.Type == user_model.UserTypeOrganization {
+ if !ctx.Doer.IsAdmin && newOwner.Visibility == structs.VisibleTypePrivate && !organization.OrgFromUser(newOwner).HasMemberWithUserID(ctx, ctx.Doer.ID) {
+ // The user shouldn't know about this organization
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_owner_name"), tplSettingsOptions, nil)
+ return
+ }
+ }
+
+ // Check the quota of the new owner
+ ok, err := quota_model.EvaluateForUser(ctx, newOwner.ID, quota_model.LimitSubjectSizeReposAll)
+ if err != nil {
+ ctx.ServerError("quota_model.EvaluateForUser", err)
+ return
+ }
+ if !ok {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.transfer_quota_exceeded", newOwner.Name), tplSettingsOptions, &form)
+ return
+ }
+
+ // Close the GitRepo if open
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ ctx.Repo.GitRepo = nil
+ }
+
+ oldFullname := repo.FullName()
+ if err := repo_service.StartRepositoryTransfer(ctx, ctx.Doer, newOwner, repo, nil); err != nil {
+ if errors.Is(err, user_model.ErrBlockedByUser) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_blocked_doer"), tplSettingsOptions, nil)
+ } else if repo_model.IsErrRepoAlreadyExist(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.new_owner_has_same_repo"), tplSettingsOptions, nil)
+ } else if models.IsErrRepoTransferInProgress(err) {
+ ctx.RenderWithErr(ctx.Tr("repo.settings.transfer_in_progress"), tplSettingsOptions, nil)
+ } else {
+ ctx.ServerError("TransferOwnership", err)
+ }
+
+ return
+ }
+
+ if ctx.Repo.Repository.Status == repo_model.RepositoryPendingTransfer {
+ log.Trace("Repository transfer process was started: %s/%s -> %s", ctx.Repo.Owner.Name, repo.Name, newOwner)
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_started", newOwner.DisplayName()))
+ } else {
+ log.Trace("Repository transferred: %s -> %s", oldFullname, ctx.Repo.Repository.FullName())
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_succeed"))
+ }
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "cancel_transfer":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ repoTransfer, err := models.GetPendingRepositoryTransfer(ctx, ctx.Repo.Repository)
+ if err != nil {
+ if models.IsErrNoPendingTransfer(err) {
+ ctx.Flash.Error("repo.settings.transfer_abort_invalid")
+ ctx.Redirect(repo.Link() + "/settings")
+ } else {
+ ctx.ServerError("GetPendingRepositoryTransfer", err)
+ }
+
+ return
+ }
+
+ if err := repoTransfer.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadRecipient", err)
+ return
+ }
+
+ if err := repo_service.CancelRepositoryTransfer(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("CancelRepositoryTransfer", err)
+ return
+ }
+
+ log.Trace("Repository transfer process was cancelled: %s/%s ", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Flash.Success(ctx.Tr("repo.settings.transfer_abort_success", repoTransfer.Recipient.Name))
+ ctx.Redirect(repo.Link() + "/settings")
+
+ case "delete":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+		// Close the git repository before deleting the repository.
+ if ctx.Repo.GitRepo != nil {
+ ctx.Repo.GitRepo.Close()
+ }
+
+ if err := repo_service.DeleteRepository(ctx, ctx.Doer, ctx.Repo.Repository, true); err != nil {
+ ctx.ServerError("DeleteRepository", err)
+ return
+ }
+ log.Trace("Repository deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.deletion_success"))
+ ctx.Redirect(ctx.Repo.Owner.DashboardLink())
+
+ case "delete-wiki":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ err := wiki_service.DeleteWiki(ctx, repo)
+ if err != nil {
+ log.Error("Delete Wiki: %v", err.Error())
+ }
+ log.Trace("Repository wiki deleted: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.wiki_deletion_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "rename-wiki-branch":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if repo.FullName() != form.RepoName {
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_repo_name"), tplSettingsOptions, nil)
+ return
+ }
+
+ if err := wiki_service.NormalizeWikiBranch(ctx, repo, setting.Repository.DefaultBranch); err != nil {
+ log.Error("Normalize Wiki branch: %v", err.Error())
+ ctx.Flash.Error(ctx.Tr("repo.settings.wiki_branch_rename_failure"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+ log.Trace("Repository wiki normalized: %s#%s", repo.FullName(), setting.Repository.DefaultBranch)
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.wiki_branch_rename_success"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "archive":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if repo.IsMirror {
+ ctx.Flash.Error(ctx.Tr("repo.settings.archive.error_ismirror"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if err := repo_model.SetArchiveRepoState(ctx, repo, true); err != nil {
+ log.Error("Tried to archive a repo: %s", err)
+ ctx.Flash.Error(ctx.Tr("repo.settings.archive.error"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.archive.success"))
+
+ log.Trace("Repository was archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ case "unarchive":
+ if !ctx.Repo.IsOwner() {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+
+ if err := repo_model.SetArchiveRepoState(ctx, repo, false); err != nil {
+ log.Error("Tried to unarchive a repo: %s", err)
+ ctx.Flash.Error(ctx.Tr("repo.settings.unarchive.error"))
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+ return
+ }
+
+ if ctx.Repo.Repository.UnitEnabled(ctx, unit_model.TypeActions) {
+ if err := actions_service.DetectAndHandleSchedules(ctx, repo); err != nil {
+ log.Error("DetectAndHandleSchedules for un-archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.unarchive.success"))
+
+ log.Trace("Repository was un-archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
+ ctx.Redirect(ctx.Repo.RepoLink + "/settings")
+
+ default:
+ ctx.NotFound("", nil)
+ }
+}
+
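+// handleSettingRemoteAddrError renders a form error matching the kind of invalid remote address,
+// falling back to a generic "invalid mirror address" message.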
+func handleSettingRemoteAddrError(ctx *context.Context, err error, form *forms.RepoSettingForm) {
+ if models.IsErrInvalidCloneAddr(err) {
+ addrErr := err.(*models.ErrInvalidCloneAddr)
+ switch {
+ case addrErr.IsProtocolInvalid:
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_protocol_invalid"), tplSettingsOptions, form)
+ case addrErr.IsURLError:
+ ctx.RenderWithErr(ctx.Tr("form.url_error", addrErr.Host), tplSettingsOptions, form)
+ case addrErr.IsPermissionDenied:
+ if addrErr.LocalPath {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied"), tplSettingsOptions, form)
+ } else {
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.permission_denied_blocked"), tplSettingsOptions, form)
+ }
+ case addrErr.IsInvalidPath:
+ ctx.RenderWithErr(ctx.Tr("repo.migrate.invalid_local_path"), tplSettingsOptions, form)
+ default:
+ ctx.ServerError("Unknown error", err)
+ }
+ return
+ }
+ ctx.RenderWithErr(ctx.Tr("repo.mirror_address_url_invalid"), tplSettingsOptions, form)
+}
+
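+// selectPushMirrorByForm returns the push mirror identified by the form's PushMirrorID,
+// ensuring that it belongs to the given repository.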
+func selectPushMirrorByForm(ctx *context.Context, form *forms.RepoSettingForm, repo *repo_model.Repository) (*repo_model.PushMirror, error) {
+ id, err := strconv.ParseInt(form.PushMirrorID, 10, 64)
+ if err != nil {
+ return nil, err
+ }
+
+ pushMirrors, _, err := repo_model.GetPushMirrorsByRepoID(ctx, repo.ID, db.ListOptions{})
+ if err != nil {
+ return nil, err
+ }
+
+ for _, m := range pushMirrors {
+ if m.ID == id {
+ m.Repo = repo
+ return m, nil
+ }
+ }
+
+ return nil, fmt.Errorf("PushMirror[%v] not associated to repository %v", id, repo)
+}
diff --git a/routers/web/repo/setting/settings_test.go b/routers/web/repo/setting/settings_test.go
new file mode 100644
index 0000000..0c8553f
--- /dev/null
+++ b/routers/web/repo/setting/settings_test.go
@@ -0,0 +1,412 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+ "testing"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ "code.gitea.io/gitea/models/perm"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
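+// createSSHAuthorizedKeysTmpPath points setting.SSH.RootPath at a temporary directory for the
+// duration of a test and returns a function that restores the original value.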
+func createSSHAuthorizedKeysTmpPath(t *testing.T) func() {
+ tmpDir := t.TempDir()
+
+ oldPath := setting.SSH.RootPath
+ setting.SSH.RootPath = tmpDir
+
+ return func() {
+ setting.SSH.RootPath = oldPath
+ }
+}
+
+func TestAddReadOnlyDeployKey(t *testing.T) {
+ if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
+ defer deferable()
+ } else {
+ return
+ }
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/settings/keys")
+
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+
+ addKeyForm := forms.AddKeyForm{
+ Title: "read-only",
+ Content: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
+ }
+ web.SetForm(ctx, &addKeyForm)
+ DeployKeysPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
+ Name: addKeyForm.Title,
+ Content: addKeyForm.Content,
+ Mode: perm.AccessModeRead,
+ })
+}
+
+func TestAddReadWriteOnlyDeployKey(t *testing.T) {
+ if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
+ defer deferable()
+ } else {
+ return
+ }
+
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/settings/keys")
+
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 2)
+
+ addKeyForm := forms.AddKeyForm{
+ Title: "read-write",
+ Content: "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQC4cn+iXnA4KvcQYSV88vGn0Yi91vG47t1P7okprVmhNTkipNRIHWr6WdCO4VDr/cvsRkuVJAsLO2enwjGWWueOO6BodiBgyAOZ/5t5nJNMCNuLGT5UIo/RI1b0WRQwxEZTRjt6mFNw6lH14wRd8ulsr9toSWBPMOGWoYs1PDeDL0JuTjL+tr1SZi/EyxCngpYszKdXllJEHyI79KQgeD0Vt3pTrkbNVTOEcCNqZePSVmUH8X8Vhugz3bnE0/iE9Pb5fkWO9c4AnM1FgI/8Bvp27Fw2ShryIXuR6kKvUqhVMTuOSDHwu6A8jLE5Owt3GAYugDpDYuwTVNGrHLXKpPzrGGPE/jPmaLCMZcsdkec95dYeU3zKODEm8UQZFhmJmDeWVJ36nGrGZHL4J5aTTaeFUJmmXDaJYiJ+K2/ioKgXqnXvltu0A9R8/LGy4nrTJRr4JMLuJFoUXvGm1gXQ70w2LSpk6yl71RNC0hCtsBe8BP8IhYCM0EP5jh7eCMQZNvM= nocomment\n",
+ IsWritable: true,
+ }
+ web.SetForm(ctx, &addKeyForm)
+ DeployKeysPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
+ Name: addKeyForm.Title,
+ Content: addKeyForm.Content,
+ Mode: perm.AccessModeWrite,
+ })
+}
+
+func TestCollaborationPost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 4)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user4")
+
+ u := &user_model.User{
+ ID: 2,
+ LowerName: "user2",
+ Type: user_model.UserTypeIndividual,
+ }
+
+ re := &repo_model.Repository{
+ ID: 2,
+ Owner: u,
+ OwnerID: u.ID,
+ }
+
+ repo := &context.Repository{
+ Owner: u,
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
+ require.NoError(t, err)
+ assert.True(t, exists)
+}
+
+func TestCollaborationPost_InactiveUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 9)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user9")
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ LowerName: "user2",
+ },
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadUser(t, ctx, 4)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user4")
+
+ u := &user_model.User{
+ ID: 2,
+ LowerName: "user2",
+ Type: user_model.UserTypeIndividual,
+ }
+
+ re := &repo_model.Repository{
+ ID: 2,
+ Owner: u,
+ OwnerID: u.ID,
+ }
+
+ repo := &context.Repository{
+ Owner: u,
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+
+ exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
+ require.NoError(t, err)
+ assert.True(t, exists)
+
+ // Try adding the same collaborator again
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestCollaborationPost_NonExistentUser(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/issues/labels")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ ctx.Req.Form.Set("collaborator", "user34")
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ LowerName: "user2",
+ },
+ }
+
+ ctx.Repo = repo
+
+ CollaborationPost(ctx)
+
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Empty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_NotAllowed(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: false,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ assert.False(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_AddTeamTwice(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team11")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 11,
+ OrgID: 26,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+
+ AddTeamPost(ctx)
+ assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestAddTeamPost_NonExistentTeam(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org26/repo43")
+
+ ctx.Req.Form.Set("team", "team-non-existent")
+
+ org := &user_model.User{
+ LowerName: "org26",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ re := &repo_model.Repository{
+ ID: 43,
+ Owner: org,
+ OwnerID: 26,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 26,
+ LowerName: "org26",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ AddTeamPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.NotEmpty(t, ctx.Flash.ErrorMsg)
+}
+
+func TestDeleteTeam(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "org3/team1/repo3")
+
+ ctx.Req.Form.Set("id", "2")
+
+ org := &user_model.User{
+ LowerName: "org3",
+ Type: user_model.UserTypeOrganization,
+ }
+
+ team := &organization.Team{
+ ID: 2,
+ OrgID: 3,
+ }
+
+ re := &repo_model.Repository{
+ ID: 3,
+ Owner: org,
+ OwnerID: 3,
+ }
+
+ repo := &context.Repository{
+ Owner: &user_model.User{
+ ID: 3,
+ LowerName: "org3",
+ RepoAdminChangeTeamAccess: true,
+ },
+ Repository: re,
+ }
+
+ ctx.Repo = repo
+
+ DeleteTeam(ctx)
+
+ assert.False(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
+}
diff --git a/routers/web/repo/setting/variables.go b/routers/web/repo/setting/variables.go
new file mode 100644
index 0000000..45b6c0f
--- /dev/null
+++ b/routers/web/repo/setting/variables.go
@@ -0,0 +1,140 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ shared "code.gitea.io/gitea/routers/web/shared/actions"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplRepoVariables base.TplName = "repo/settings/actions"
+ tplOrgVariables base.TplName = "org/settings/actions"
+ tplUserVariables base.TplName = "user/settings/actions"
+ tplAdminVariables base.TplName = "admin/actions"
+)
+
+type variablesCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsRepo bool
+ IsOrg bool
+ IsUser bool
+ IsGlobal bool
+ VariablesTemplate base.TplName
+ RedirectLink string
+}
+
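+// getVariablesCtx determines whether the variables page is being served for a repository, an
+// organization, a user, or globally (admin), and returns the matching IDs, template, and redirect link.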
+func getVariablesCtx(ctx *context.Context) (*variablesCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &variablesCtx{
+ OwnerID: 0,
+ RepoID: ctx.Repo.Repository.ID,
+ IsRepo: true,
+ VariablesTemplate: tplRepoVariables,
+ RedirectLink: ctx.Repo.RepoLink + "/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return nil, nil
+ }
+ return &variablesCtx{
+ OwnerID: ctx.ContextUser.ID,
+ RepoID: 0,
+ IsOrg: true,
+ VariablesTemplate: tplOrgVariables,
+ RedirectLink: ctx.Org.OrgLink + "/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &variablesCtx{
+ OwnerID: ctx.Doer.ID,
+ RepoID: 0,
+ IsUser: true,
+ VariablesTemplate: tplUserVariables,
+ RedirectLink: setting.AppSubURL + "/user/settings/actions/variables",
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &variablesCtx{
+ OwnerID: 0,
+ RepoID: 0,
+ IsGlobal: true,
+ VariablesTemplate: tplAdminVariables,
+ RedirectLink: setting.AppSubURL + "/admin/actions/variables",
+ }, nil
+ }
+
+ return nil, errors.New("unable to set Variables context")
+}
+
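+// Variables renders the Actions variables page for the current context.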
+func Variables(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("actions.variables")
+ ctx.Data["PageType"] = "variables"
+ ctx.Data["PageIsSharedSettingsVariables"] = true
+
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ shared.SetVariablesContext(ctx, vCtx.OwnerID, vCtx.RepoID)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, vCtx.VariablesTemplate)
+}
+
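+// VariableCreate handles the creation of a new Actions variable.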
+func VariableCreate(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ if ctx.HasError() { // form binding validation error
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.CreateVariable(ctx, vCtx.OwnerID, vCtx.RepoID, vCtx.RedirectLink)
+}
+
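+// VariableUpdate handles updating an existing Actions variable.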
+func VariableUpdate(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+
+ if ctx.HasError() { // form binding validation error
+ ctx.JSONError(ctx.GetErrMsg())
+ return
+ }
+
+ shared.UpdateVariable(ctx, vCtx.RedirectLink)
+}
+
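+// VariableDelete handles deleting an Actions variable.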
+func VariableDelete(ctx *context.Context) {
+ vCtx, err := getVariablesCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getVariablesCtx", err)
+ return
+ }
+ shared.DeleteVariable(ctx, vCtx.RedirectLink)
+}
diff --git a/routers/web/repo/setting/webhook.go b/routers/web/repo/setting/webhook.go
new file mode 100644
index 0000000..eee493e
--- /dev/null
+++ b/routers/web/repo/setting/webhook.go
@@ -0,0 +1,485 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "path"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/setting"
+ api "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/web/middleware"
+ webhook_module "code.gitea.io/gitea/modules/webhook"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/forms"
+ webhook_service "code.gitea.io/gitea/services/webhook"
+
+ "gitea.com/go-chi/binding"
+)
+
+const (
+ tplHooks base.TplName = "repo/settings/webhook/base"
+ tplHookNew base.TplName = "repo/settings/webhook/new"
+ tplOrgHookNew base.TplName = "org/settings/hook_new"
+ tplUserHookNew base.TplName = "user/settings/hook_new"
+ tplAdminHookNew base.TplName = "admin/hook_new"
+)
+
+// WebhookList render web hooks list page
+func WebhookList(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.hooks")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["BaseLink"] = ctx.Repo.RepoLink + "/settings/hooks"
+ ctx.Data["BaseLinkNew"] = ctx.Repo.RepoLink + "/settings/hooks"
+ ctx.Data["WebhookList"] = webhook_service.List()
+ ctx.Data["Description"] = ctx.Tr("repo.settings.hooks_desc", "https://forgejo.org/docs/latest/user/webhooks/")
+
+ ws, err := db.Find[webhook.Webhook](ctx, webhook.ListWebhookOptions{RepoID: ctx.Repo.Repository.ID})
+ if err != nil {
+ ctx.ServerError("GetWebhooksByRepoID", err)
+ return
+ }
+ ctx.Data["Webhooks"] = ws
+
+ ctx.HTML(http.StatusOK, tplHooks)
+}
+
+type ownerRepoCtx struct {
+ OwnerID int64
+ RepoID int64
+ IsAdmin bool
+ IsSystemWebhook bool
+ Link string
+ LinkNew string
+ NewTemplate base.TplName
+}
+
+// getOwnerRepoCtx determines whether this is a repo, owner, or admin (both default and system) context.
+func getOwnerRepoCtx(ctx *context.Context) (*ownerRepoCtx, error) {
+ if ctx.Data["PageIsRepoSettings"] == true {
+ return &ownerRepoCtx{
+ RepoID: ctx.Repo.Repository.ID,
+ Link: path.Join(ctx.Repo.RepoLink, "settings/hooks"),
+ LinkNew: path.Join(ctx.Repo.RepoLink, "settings/hooks"),
+ NewTemplate: tplHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsOrgSettings"] == true {
+ return &ownerRepoCtx{
+ OwnerID: ctx.ContextUser.ID,
+ Link: path.Join(ctx.Org.OrgLink, "settings/hooks"),
+ LinkNew: path.Join(ctx.Org.OrgLink, "settings/hooks"),
+ NewTemplate: tplOrgHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsUserSettings"] == true {
+ return &ownerRepoCtx{
+ OwnerID: ctx.Doer.ID,
+ Link: path.Join(setting.AppSubURL, "/user/settings/hooks"),
+ LinkNew: path.Join(setting.AppSubURL, "/user/settings/hooks"),
+ NewTemplate: tplUserHookNew,
+ }, nil
+ }
+
+ if ctx.Data["PageIsAdmin"] == true {
+ return &ownerRepoCtx{
+ IsAdmin: true,
+ IsSystemWebhook: ctx.Params(":configType") == "system-hooks",
+ Link: path.Join(setting.AppSubURL, "/admin/hooks"),
+ LinkNew: path.Join(setting.AppSubURL, "/admin/", ctx.Params(":configType")),
+ NewTemplate: tplAdminHookNew,
+ }, nil
+ }
+
+ return nil, errors.New("unable to set OwnerRepo context")
+}
+
+// WebhookNew renders the page for creating a new webhook
+func WebhookNew(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.add_webhook")
+ ctx.Data["Webhook"] = webhook.Webhook{HookEvent: &webhook_module.HookEvent{}}
+
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return
+ }
+
+ if orCtx.IsAdmin && orCtx.IsSystemWebhook {
+ ctx.Data["PageIsAdminSystemHooks"] = true
+ ctx.Data["PageIsAdminSystemHooksNew"] = true
+ } else if orCtx.IsAdmin {
+ ctx.Data["PageIsAdminDefaultHooks"] = true
+ ctx.Data["PageIsAdminDefaultHooksNew"] = true
+ } else {
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksNew"] = true
+ }
+
+ hookType := ctx.Params(":type")
+ handler := webhook_service.GetWebhookHandler(hookType)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+ ctx.Data["HookType"] = hookType
+ ctx.Data["WebhookHandler"] = handler
+ ctx.Data["BaseLink"] = orCtx.LinkNew
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ ctx.HTML(http.StatusOK, orCtx.NewTemplate)
+}
+
+// ParseHookEvent converts web form content to a webhook_module.HookEvent
+func ParseHookEvent(form forms.WebhookCoreForm) *webhook_module.HookEvent {
+ return &webhook_module.HookEvent{
+ PushOnly: form.PushOnly(),
+ SendEverything: form.SendEverything(),
+ ChooseEvents: form.ChooseEvents(),
+ HookEvents: webhook_module.HookEvents{
+ Create: form.Create,
+ Delete: form.Delete,
+ Fork: form.Fork,
+ Issues: form.Issues,
+ IssueAssign: form.IssueAssign,
+ IssueLabel: form.IssueLabel,
+ IssueMilestone: form.IssueMilestone,
+ IssueComment: form.IssueComment,
+ Release: form.Release,
+ Push: form.Push,
+ PullRequest: form.PullRequest,
+ PullRequestAssign: form.PullRequestAssign,
+ PullRequestLabel: form.PullRequestLabel,
+ PullRequestMilestone: form.PullRequestMilestone,
+ PullRequestComment: form.PullRequestComment,
+ PullRequestReview: form.PullRequestReview,
+ PullRequestSync: form.PullRequestSync,
+ PullRequestReviewRequest: form.PullRequestReviewRequest,
+ Wiki: form.Wiki,
+ Repository: form.Repository,
+ Package: form.Package,
+ },
+ BranchFilter: form.BranchFilter,
+ }
+}
+
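+// WebhookCreate handles the "new webhook" form submission and creates the webhook.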
+func WebhookCreate(ctx *context.Context) {
+ hookType := ctx.Params(":type")
+ handler := webhook_service.GetWebhookHandler(hookType)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+
+ fields := handler.UnmarshalForm(func(form any) {
+ errs := binding.Bind(ctx.Req, form)
+ middleware.Validate(errs, ctx.Data, form, ctx.Locale) // error checked below in ctx.HasError
+ })
+
+ ctx.Data["Title"] = ctx.Tr("repo.settings.add_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksNew"] = true
+ ctx.Data["Webhook"] = webhook.Webhook{HookEvent: &webhook_module.HookEvent{}}
+ ctx.Data["HookType"] = hookType
+ ctx.Data["WebhookHandler"] = handler
+
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return
+ }
+ ctx.Data["BaseLink"] = orCtx.LinkNew
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ if ctx.HasError() {
+ // pre-fill the form with the submitted data
+ var w webhook.Webhook
+ w.URL = fields.URL
+ w.ContentType = fields.ContentType
+ w.Secret = fields.Secret
+ w.HookEvent = ParseHookEvent(fields.WebhookCoreForm)
+ w.IsActive = fields.Active
+ w.HTTPMethod = fields.HTTPMethod
+ err := w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+ ctx.Data["Webhook"] = w
+ ctx.Data["HookMetadata"] = fields.Metadata
+
+ ctx.HTML(http.StatusUnprocessableEntity, orCtx.NewTemplate)
+ return
+ }
+
+ var meta []byte
+ if fields.Metadata != nil {
+ meta, err = json.Marshal(fields.Metadata)
+ if err != nil {
+ ctx.ServerError("Marshal", err)
+ return
+ }
+ }
+
+ w := &webhook.Webhook{
+ RepoID: orCtx.RepoID,
+ URL: fields.URL,
+ HTTPMethod: fields.HTTPMethod,
+ ContentType: fields.ContentType,
+ Secret: fields.Secret,
+ HookEvent: ParseHookEvent(fields.WebhookCoreForm),
+ IsActive: fields.Active,
+ Type: hookType,
+ Meta: string(meta),
+ OwnerID: orCtx.OwnerID,
+ IsSystemWebhook: orCtx.IsSystemWebhook,
+ }
+ err = w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+ if err := w.UpdateEvent(); err != nil {
+ ctx.ServerError("UpdateEvent", err)
+ return
+ } else if err := webhook.CreateWebhook(ctx, w); err != nil {
+ ctx.ServerError("CreateWebhook", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.add_hook_success"))
+ ctx.Redirect(orCtx.Link)
+}
+
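+// WebhookUpdate handles the webhook edit form submission and updates the webhook.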
+func WebhookUpdate(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.update_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksEdit"] = true
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Webhook"] = w
+
+ handler := webhook_service.GetWebhookHandler(w.Type)
+ if handler == nil {
+ ctx.NotFound("GetWebhookHandler", nil)
+ return
+ }
+
+ fields := handler.UnmarshalForm(func(form any) {
+ errs := binding.Bind(ctx.Req, form)
+ middleware.Validate(errs, ctx.Data, form, ctx.Locale) // error checked below in ctx.HasError
+ })
+
+ // pre-fill the form with the submitted data
+ w.URL = fields.URL
+ w.ContentType = fields.ContentType
+ w.Secret = fields.Secret
+ w.HookEvent = ParseHookEvent(fields.WebhookCoreForm)
+ w.IsActive = fields.Active
+ w.HTTPMethod = fields.HTTPMethod
+
+ err := w.SetHeaderAuthorization(fields.AuthorizationHeader)
+ if err != nil {
+ ctx.ServerError("SetHeaderAuthorization", err)
+ return
+ }
+
+ if ctx.HasError() {
+ ctx.Data["HookMetadata"] = fields.Metadata
+ ctx.HTML(http.StatusUnprocessableEntity, orCtx.NewTemplate)
+ return
+ }
+
+ var meta []byte
+ if fields.Metadata != nil {
+ meta, err = json.Marshal(fields.Metadata)
+ if err != nil {
+ ctx.ServerError("Marshal", err)
+ return
+ }
+ }
+
+ w.Meta = string(meta)
+
+ if err := w.UpdateEvent(); err != nil {
+ ctx.ServerError("UpdateEvent", err)
+ return
+ } else if err := webhook.UpdateWebhook(ctx, w); err != nil {
+ ctx.ServerError("UpdateWebhook", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.update_hook_success"))
+ ctx.Redirect(fmt.Sprintf("%s/%d", orCtx.Link, w.ID))
+}
+
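+// checkWebhook loads the webhook addressed by the request for the current repo, owner, or admin
+// context and fills the shared template data.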
+func checkWebhook(ctx *context.Context) (*ownerRepoCtx, *webhook.Webhook) {
+ orCtx, err := getOwnerRepoCtx(ctx)
+ if err != nil {
+ ctx.ServerError("getOwnerRepoCtx", err)
+ return nil, nil
+ }
+ ctx.Data["BaseLink"] = orCtx.Link
+ ctx.Data["BaseLinkNew"] = orCtx.LinkNew
+ ctx.Data["WebhookList"] = webhook_service.List()
+
+ var w *webhook.Webhook
+ if orCtx.RepoID > 0 {
+ w, err = webhook.GetWebhookByRepoID(ctx, orCtx.RepoID, ctx.ParamsInt64(":id"))
+ } else if orCtx.OwnerID > 0 {
+ w, err = webhook.GetWebhookByOwnerID(ctx, orCtx.OwnerID, ctx.ParamsInt64(":id"))
+ } else if orCtx.IsAdmin {
+ w, err = webhook.GetSystemOrDefaultWebhook(ctx, ctx.ParamsInt64(":id"))
+ }
+ if err != nil || w == nil {
+ if webhook.IsErrWebhookNotExist(err) {
+ ctx.NotFound("GetWebhookByID", nil)
+ } else {
+ ctx.ServerError("GetWebhookByID", err)
+ }
+ return nil, nil
+ }
+
+ ctx.Data["HookType"] = w.Type
+
+ if handler := webhook_service.GetWebhookHandler(w.Type); handler != nil {
+ ctx.Data["HookMetadata"] = handler.Metadata(w)
+ ctx.Data["WebhookHandler"] = handler
+ }
+
+ ctx.Data["History"], err = w.History(ctx, 1)
+ if err != nil {
+ ctx.ServerError("History", err)
+ }
+ return orCtx, w
+}
+
+// WebhookEdit renders the webhook editing page
+func WebhookEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.settings.update_webhook")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["PageIsSettingsHooksEdit"] = true
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Webhook"] = w
+
+ ctx.HTML(http.StatusOK, orCtx.NewTemplate)
+}
+
+// WebhookTest sends a test push payload to verify that the webhook works
+func WebhookTest(ctx *context.Context) {
+ hookID := ctx.ParamsInt64(":id")
+ w, err := webhook.GetWebhookByRepoID(ctx, ctx.Repo.Repository.ID, hookID)
+ if err != nil {
+ ctx.Flash.Error("GetWebhookByRepoID: " + err.Error())
+ ctx.Status(http.StatusInternalServerError)
+ return
+ }
+
+	// Grab the latest commit, or fake one if the repository is empty.
+ commit := ctx.Repo.Commit
+ if commit == nil {
+ ghost := user_model.NewGhostUser()
+ objectFormat := git.ObjectFormatFromName(ctx.Repo.Repository.ObjectFormatName)
+ commit = &git.Commit{
+ ID: objectFormat.EmptyObjectID(),
+ Author: ghost.NewGitSig(),
+ Committer: ghost.NewGitSig(),
+ CommitMessage: "This is a fake commit",
+ }
+ }
+
+ apiUser := convert.ToUserWithAccessMode(ctx, ctx.Doer, perm.AccessModeNone)
+
+ apiCommit := &api.PayloadCommit{
+ ID: commit.ID.String(),
+ Message: commit.Message(),
+ URL: ctx.Repo.Repository.HTMLURL() + "/commit/" + url.PathEscape(commit.ID.String()),
+ Author: &api.PayloadUser{
+ Name: commit.Author.Name,
+ Email: commit.Author.Email,
+ },
+ Committer: &api.PayloadUser{
+ Name: commit.Committer.Name,
+ Email: commit.Committer.Email,
+ },
+ }
+
+ commitID := commit.ID.String()
+ p := &api.PushPayload{
+ Ref: git.BranchPrefix + ctx.Repo.Repository.DefaultBranch,
+ Before: commitID,
+ After: commitID,
+ CompareURL: setting.AppURL + ctx.Repo.Repository.ComposeCompareURL(commitID, commitID),
+ Commits: []*api.PayloadCommit{apiCommit},
+ TotalCommits: 1,
+ HeadCommit: apiCommit,
+ Repo: convert.ToRepo(ctx, ctx.Repo.Repository, access_model.Permission{AccessMode: perm.AccessModeNone}),
+ Pusher: apiUser,
+ Sender: apiUser,
+ }
+ if err := webhook_service.PrepareWebhook(ctx, w, webhook_module.HookEventPush, p); err != nil {
+ ctx.Flash.Error("PrepareWebhook: " + err.Error())
+ ctx.Status(http.StatusInternalServerError)
+ } else {
+ ctx.Flash.Info(ctx.Tr("repo.settings.webhook.delivery.success"))
+ ctx.Status(http.StatusOK)
+ }
+}
+
+// WebhookReplay replays a webhook
+func WebhookReplay(ctx *context.Context) {
+ hookTaskUUID := ctx.Params(":uuid")
+
+ orCtx, w := checkWebhook(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if err := webhook_service.ReplayHookTask(ctx, w, hookTaskUUID); err != nil {
+ if webhook.IsErrHookTaskNotExist(err) {
+ ctx.NotFound("ReplayHookTask", nil)
+ } else {
+ ctx.ServerError("ReplayHookTask", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook.delivery.success"))
+ ctx.Redirect(fmt.Sprintf("%s/%d", orCtx.Link, w.ID))
+}
+
+// WebhookDelete deletes a webhook
+func WebhookDelete(ctx *context.Context) {
+ if err := webhook.DeleteWebhookByRepoID(ctx, ctx.Repo.Repository.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteWebhookByRepoID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook_deletion_success"))
+ }
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/settings/hooks")
+}
diff --git a/routers/web/repo/topic.go b/routers/web/repo/topic.go
new file mode 100644
index 0000000..d81a695
--- /dev/null
+++ b/routers/web/repo/topic.go
@@ -0,0 +1,60 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+ "strings"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/context"
+)
+
+// TopicsPost handles updating the topics of a repository
+func TopicsPost(ctx *context.Context) {
+ if ctx.Doer == nil {
+ ctx.JSON(http.StatusForbidden, map[string]any{
+ "message": "Only owners could change the topics.",
+ })
+ return
+ }
+
+ topics := make([]string, 0)
+ topicsStr := ctx.FormTrim("topics")
+ if len(topicsStr) > 0 {
+ topics = strings.Split(topicsStr, ",")
+ }
+
+ validTopics, invalidTopics := repo_model.SanitizeAndValidateTopics(topics)
+
+ if len(validTopics) > 25 {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]any{
+ "invalidTopics": nil,
+ "message": ctx.Tr("repo.topic.count_prompt"),
+ })
+ return
+ }
+
+ if len(invalidTopics) > 0 {
+ ctx.JSON(http.StatusUnprocessableEntity, map[string]any{
+ "invalidTopics": invalidTopics,
+ "message": ctx.Tr("repo.topic.format_prompt"),
+ })
+ return
+ }
+
+ err := repo_model.SaveTopics(ctx, ctx.Repo.Repository.ID, validTopics...)
+ if err != nil {
+ log.Error("SaveTopics failed: %v", err)
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "message": "Save topics failed.",
+ })
+ return
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "status": "ok",
+ })
+}
diff --git a/routers/web/repo/treelist.go b/routers/web/repo/treelist.go
new file mode 100644
index 0000000..d11af46
--- /dev/null
+++ b/routers/web/repo/treelist.go
@@ -0,0 +1,54 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/go-enry/go-enry/v2"
+)
+
+// TreeList returns all file entries of a repository
+func TreeList(ctx *context.Context) {
+ tree, err := ctx.Repo.Commit.SubTree("/")
+ if err != nil {
+ ctx.ServerError("Repo.Commit.SubTree", err)
+ return
+ }
+
+ entries, err := tree.ListEntriesRecursiveFast()
+ if err != nil {
+ ctx.ServerError("ListEntriesRecursiveFast", err)
+ return
+ }
+ entries.CustomSort(base.NaturalSortLess)
+
+ files := make([]string, 0, len(entries))
+ for _, entry := range entries {
+ if !isExcludedEntry(entry) {
+ files = append(files, entry.Name())
+ }
+ }
+ ctx.JSON(http.StatusOK, files)
+}
+
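+// isExcludedEntry reports whether a tree entry should be omitted from the tree list:
+// directories, submodules, and vendored files are skipped.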
+func isExcludedEntry(entry *git.TreeEntry) bool {
+ if entry.IsDir() {
+ return true
+ }
+
+ if entry.IsSubModule() {
+ return true
+ }
+
+ if enry.IsVendor(entry.Name()) {
+ return true
+ }
+
+ return false
+}
diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go
new file mode 100644
index 0000000..f1445c5
--- /dev/null
+++ b/routers/web/repo/view.go
@@ -0,0 +1,1258 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ gocontext "context"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "html/template"
+ "image"
+ "io"
+ "net/http"
+ "net/url"
+ "path"
+ "slices"
+ "strings"
+ "time"
+
+ _ "image/gif" // for processing gif images
+ _ "image/jpeg" // for processing jpeg images
+ _ "image/png" // for processing png images
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ admin_model "code.gitea.io/gitea/models/admin"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issue_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ unit_model "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/actions"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/highlight"
+ "code.gitea.io/gitea/modules/lfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/svg"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ files_service "code.gitea.io/gitea/services/repository/files"
+
+ "github.com/nektos/act/pkg/model"
+
+ _ "golang.org/x/image/bmp" // for processing bmp images
+ _ "golang.org/x/image/webp" // for processing webp images
+)
+
+const (
+ tplRepoEMPTY base.TplName = "repo/empty"
+ tplRepoHome base.TplName = "repo/home"
+ tplRepoViewList base.TplName = "repo/view_list"
+ tplWatchers base.TplName = "repo/watchers"
+ tplForks base.TplName = "repo/forks"
+ tplMigrating base.TplName = "repo/migrate/migrating"
+)
+
+// FindReadmeFileInEntries locates a README for a tree in one of the supported paths.
+//
+// entries is passed to reduce calls to ListEntries(), so
+// this has precondition:
+//
+// entries == ctx.Repo.Commit.SubTree(ctx.Repo.TreePath).ListEntries()
+//
+// FIXME: There has to be a more efficient way of doing this
+func FindReadmeFileInEntries(ctx *context.Context, entries []*git.TreeEntry, tryWellKnownDirs bool) (string, *git.TreeEntry, error) {
+ // Create a list of extensions in priority order
+ // 1. Markdown files - with and without localisation - e.g. README.en-us.md or README.md
+ // 2. Org-Mode files - with and without localisation - e.g. README.en-us.org or README.org
+ // 3. Txt files - e.g. README.txt
+ // 4. No extension - e.g. README
+ exts := append(append(localizedExtensions(".md", ctx.Locale.Language()), localizedExtensions(".org", ctx.Locale.Language())...), ".txt", "") // sorted by priority
+ extCount := len(exts)
+ readmeFiles := make([]*git.TreeEntry, extCount+1)
+
+ docsEntries := make([]*git.TreeEntry, 3) // (one of docs/, .gitea/ or .github/)
+ for _, entry := range entries {
+ if tryWellKnownDirs && entry.IsDir() {
+ // as a special case for the top-level repo introduction README,
+ // fall back to subfolders, looking for e.g. docs/README.md, .gitea/README.zh-CN.txt, .github/README.txt, ...
+ // (note that docsEntries is ignored unless we are at the root)
+ lowerName := strings.ToLower(entry.Name())
+ switch lowerName {
+ case "docs":
+ if entry.Name() == "docs" || docsEntries[0] == nil {
+ docsEntries[0] = entry
+ }
+ case ".forgejo":
+ if entry.Name() == ".forgejo" || docsEntries[1] == nil {
+ docsEntries[1] = entry
+ }
+ case ".gitea":
+ if entry.Name() == ".gitea" || docsEntries[1] == nil {
+ docsEntries[1] = entry
+ }
+ case ".github":
+ if entry.Name() == ".github" || docsEntries[2] == nil {
+ docsEntries[2] = entry
+ }
+ }
+ continue
+ }
+ if i, ok := util.IsReadmeFileExtension(entry.Name(), exts...); ok {
+ log.Debug("Potential readme file: %s", entry.Name())
+ if readmeFiles[i] == nil || base.NaturalSortLess(readmeFiles[i].Name(), entry.Blob().Name()) {
+ if entry.IsLink() {
+ target, _, err := entry.FollowLinks()
+ if err != nil && !git.IsErrBadLink(err) {
+ return "", nil, err
+ } else if target != nil && (target.IsExecutable() || target.IsRegular()) {
+ readmeFiles[i] = entry
+ }
+ } else {
+ readmeFiles[i] = entry
+ }
+ }
+ }
+ }
+ var readmeFile *git.TreeEntry
+ for _, f := range readmeFiles {
+ if f != nil {
+ readmeFile = f
+ break
+ }
+ }
+
+ if ctx.Repo.TreePath == "" && readmeFile == nil {
+ for _, subTreeEntry := range docsEntries {
+ if subTreeEntry == nil {
+ continue
+ }
+ subTree := subTreeEntry.Tree()
+ if subTree == nil {
+ // this should be impossible; if subTreeEntry exists so should this.
+ continue
+ }
+ var err error
+ childEntries, err := subTree.ListEntries()
+ if err != nil {
+ return "", nil, err
+ }
+
+ subfolder, readmeFile, err := FindReadmeFileInEntries(ctx, childEntries, false)
+ if err != nil && !git.IsErrNotExist(err) {
+ return "", nil, err
+ }
+ if readmeFile != nil {
+ return path.Join(subTreeEntry.Name(), subfolder), readmeFile, nil
+ }
+ }
+ }
+
+ return "", readmeFile, nil
+}
+
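+// renderDirectory renders the file listing of the current tree path and, if present, its README.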
+func renderDirectory(ctx *context.Context) {
+ entries := renderDirectoryFiles(ctx, 1*time.Second)
+ if ctx.Written() {
+ return
+ }
+
+ if ctx.Repo.TreePath != "" {
+ ctx.Data["HideRepoInfo"] = true
+ ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefName)
+ }
+
+ subfolder, readmeFile, err := FindReadmeFileInEntries(ctx, entries, true)
+ if err != nil {
+ ctx.ServerError("findReadmeFileInEntries", err)
+ return
+ }
+
+ renderReadmeFile(ctx, subfolder, readmeFile)
+}
+
+// localizedExtensions prepends the provided language code with and without a
+// regional identifier to the provided extension.
+// Note: the language code will always be lower-cased, if a region is present it must be separated with a `-`
+// Note: ext should be prefixed with a `.`
+func localizedExtensions(ext, languageCode string) (localizedExts []string) {
+ if len(languageCode) < 1 {
+ return []string{ext}
+ }
+
+ lowerLangCode := "." + strings.ToLower(languageCode)
+
+ if strings.Contains(lowerLangCode, "-") {
+ underscoreLangCode := strings.ReplaceAll(lowerLangCode, "-", "_")
+ indexOfDash := strings.Index(lowerLangCode, "-")
+ // e.g. [.zh-cn.md, .zh_cn.md, .zh.md, _zh.md, .md]
+ return []string{lowerLangCode + ext, underscoreLangCode + ext, lowerLangCode[:indexOfDash] + ext, "_" + lowerLangCode[1:indexOfDash] + ext, ext}
+ }
+
+ // e.g. [.en.md, .md]
+ return []string{lowerLangCode + ext, ext}
+}
+
+type fileInfo struct {
+ isTextFile bool
+ isLFSFile bool
+ fileSize int64
+ lfsMeta *lfs.Pointer
+ st typesniffer.SniffedType
+}
+
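+// getFileReader reads the first bytes of a blob to sniff its content type; if the blob is a valid
+// LFS pointer and the LFS server is enabled, it switches to reading the underlying LFS object.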
+func getFileReader(ctx gocontext.Context, repoID int64, blob *git.Blob) ([]byte, io.ReadCloser, *fileInfo, error) {
+ dataRc, err := blob.DataAsync()
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ buf := make([]byte, 1024)
+ n, _ := util.ReadAtMost(dataRc, buf)
+ buf = buf[:n]
+
+ st := typesniffer.DetectContentType(buf)
+ isTextFile := st.IsText()
+
+ // FIXME: what happens when README file is an image?
+ if !isTextFile || !setting.LFS.StartServer {
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ pointer, _ := lfs.ReadPointerFromBuffer(buf)
+ if !pointer.IsValid() { // fallback to plain file
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ meta, err := git_model.GetLFSMetaObjectByOid(ctx, repoID, pointer.Oid)
+ if err != nil { // fallback to plain file
+ log.Warn("Unable to access LFS pointer %s in repo %d: %v", pointer.Oid, repoID, err)
+ return buf, dataRc, &fileInfo{isTextFile, false, blob.Size(), nil, st}, nil
+ }
+
+ dataRc.Close()
+
+ dataRc, err = lfs.ReadMetaObject(pointer)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ buf = make([]byte, 1024)
+ n, err = util.ReadAtMost(dataRc, buf)
+ if err != nil {
+ dataRc.Close()
+ return nil, nil, nil, err
+ }
+ buf = buf[:n]
+
+ st = typesniffer.DetectContentType(buf)
+
+ return buf, dataRc, &fileInfo{st.IsText(), true, meta.Size, &meta.Pointer, st}, nil
+}
+
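+// renderReadmeFile renders the located README entry (following symlinks) for the directory view.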
+func renderReadmeFile(ctx *context.Context, subfolder string, readmeFile *git.TreeEntry) {
+ target := readmeFile
+ if readmeFile != nil && readmeFile.IsLink() {
+ target, _, _ = readmeFile.FollowLinks()
+ }
+ if target == nil {
+		// if FindReadmeFileInEntries() failed or gave us a broken symlink (which it shouldn't),
+		// simply skip rendering the README
+ return
+ }
+
+ ctx.Data["RawFileLink"] = ""
+ ctx.Data["ReadmeInList"] = true
+ ctx.Data["ReadmeExist"] = true
+ ctx.Data["FileIsSymlink"] = readmeFile.IsLink()
+
+ buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, target.Blob())
+ if err != nil {
+ ctx.ServerError("getFileReader", err)
+ return
+ }
+ defer dataRc.Close()
+
+ ctx.Data["FileIsText"] = fInfo.isTextFile
+ ctx.Data["FileName"] = path.Join(subfolder, readmeFile.Name())
+ ctx.Data["IsLFSFile"] = fInfo.isLFSFile
+
+ if fInfo.isLFSFile {
+ filenameBase64 := base64.RawURLEncoding.EncodeToString([]byte(readmeFile.Name()))
+ ctx.Data["RawFileLink"] = fmt.Sprintf("%s.git/info/lfs/objects/%s/%s", ctx.Repo.Repository.Link(), url.PathEscape(fInfo.lfsMeta.Oid), url.PathEscape(filenameBase64))
+ }
+
+ if !fInfo.isTextFile {
+ return
+ }
+
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ // Pretend that this is a normal text file to display 'This file is too large to be shown'
+ ctx.Data["IsFileTooLarge"] = true
+ ctx.Data["IsTextFile"] = true
+ ctx.Data["FileSize"] = fInfo.fileSize
+ return
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+ if markupType := markup.Type(readmeFile.Name()); markupType != "" {
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: path.Join(ctx.Repo.TreePath, readmeFile.Name()), // ctx.Repo.TreePath is the directory not the Readme so we must append the Readme filename (and path).
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Join(ctx.Repo.TreePath, subfolder),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ log.Error("Render failed for %s in %-v: %v Falling back to rendering source", readmeFile.Name(), ctx.Repo.Repository, err)
+ delete(ctx.Data, "IsMarkup")
+ }
+ }
+
+ if ctx.Data["IsMarkup"] != true {
+ ctx.Data["IsPlainText"] = true
+ content, err := io.ReadAll(rd)
+ if err != nil {
+ log.Error("Read readme content failed: %v", err)
+ }
+ contentEscaped := template.HTMLEscapeString(util.UnsafeBytesToString(content))
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"] = charset.EscapeControlHTML(template.HTML(contentEscaped), ctx.Locale, charset.FileviewContext)
+ }
+
+ if !fInfo.isLFSFile && ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ ctx.Data["CanEditReadmeFile"] = true
+ }
+}
+
+func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
+ // Show latest commit info of repository in table header,
+ // or of directory if not in root directory.
+ ctx.Data["LatestCommit"] = latestCommit
+ if latestCommit != nil {
+ verification := asymkey_model.ParseCommitWithSignature(ctx, latestCommit)
+
+ if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+ }, nil); err != nil {
+ ctx.ServerError("CalculateTrustStatus", err)
+ return false
+ }
+ ctx.Data["LatestCommitVerification"] = verification
+ ctx.Data["LatestCommitUser"] = user_model.ValidateCommitWithEmail(ctx, latestCommit)
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, latestCommit.ID.String(), db.ListOptionsAll)
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ }
+ if !ctx.Repo.CanRead(unit_model.TypeActions) {
+ git_model.CommitStatusesHideActionsURL(ctx, statuses)
+ }
+
+ ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(statuses)
+ ctx.Data["LatestCommitStatuses"] = statuses
+ }
+
+ return true
+}
+
+func renderFile(ctx *context.Context, entry *git.TreeEntry) {
+ ctx.Data["IsViewFile"] = true
+ ctx.Data["HideRepoInfo"] = true
+ blob := entry.Blob()
+ buf, dataRc, fInfo, err := getFileReader(ctx, ctx.Repo.Repository.ID, blob)
+ if err != nil {
+ ctx.ServerError("getFileReader", err)
+ return
+ }
+ defer dataRc.Close()
+
+ ctx.Data["Title"] = ctx.Tr("repo.file.title", ctx.Repo.Repository.Name+"/"+ctx.Repo.TreePath, ctx.Repo.RefName)
+ ctx.Data["FileIsSymlink"] = entry.IsLink()
+ ctx.Data["FileName"] = blob.Name()
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/raw/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+
+ if entry.IsLink() {
+ _, link, err := entry.FollowLinks()
+ // Errors are tolerated here: a broken symlink target
+ // should not block rendering the file itself.
+ if err == nil {
+ ctx.Data["SymlinkURL"] = ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(link)
+ }
+ }
+
+ commit, err := ctx.Repo.Commit.GetCommitByPath(ctx.Repo.TreePath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+
+ if !loadLatestCommitData(ctx, commit) {
+ return
+ }
+
+ if ctx.Repo.TreePath == ".editorconfig" {
+ _, editorconfigWarning, editorconfigErr := ctx.Repo.GetEditorconfig(ctx.Repo.Commit)
+ if editorconfigWarning != nil {
+ ctx.Data["FileWarning"] = strings.TrimSpace(editorconfigWarning.Error())
+ }
+ if editorconfigErr != nil {
+ ctx.Data["FileError"] = strings.TrimSpace(editorconfigErr.Error())
+ }
+ } else if issue_service.IsTemplateConfig(ctx.Repo.TreePath) {
+ _, issueConfigErr := issue_service.GetTemplateConfig(ctx.Repo.GitRepo, ctx.Repo.TreePath, ctx.Repo.Commit)
+ if issueConfigErr != nil {
+ ctx.Data["FileError"] = strings.TrimSpace(issueConfigErr.Error())
+ }
+ } else if actions.IsWorkflow(ctx.Repo.TreePath) {
+ content, err := actions.GetContentFromEntry(entry)
+ if err != nil {
+ log.Error("actions.GetContentFromEntry: %v", err)
+ }
+ _, workFlowErr := model.ReadWorkflow(bytes.NewReader(content))
+ if workFlowErr != nil {
+ ctx.Data["FileError"] = ctx.Locale.Tr("actions.runs.invalid_workflow_helper", workFlowErr.Error())
+ }
+ } else if slices.Contains([]string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}, ctx.Repo.TreePath) {
+ if data, err := blob.GetBlobContent(setting.UI.MaxDisplayFileSize); err == nil {
+ _, warnings := issue_model.GetCodeOwnersFromContent(ctx, data)
+ if len(warnings) > 0 {
+ ctx.Data["FileWarning"] = strings.Join(warnings, "\n")
+ }
+ }
+ }
+
+ isDisplayingSource := ctx.FormString("display") == "source"
+ isDisplayingRendered := !isDisplayingSource
+
+ if fInfo.isLFSFile {
+ ctx.Data["RawFileLink"] = ctx.Repo.RepoLink + "/media/" + ctx.Repo.BranchNameSubURL() + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+
+ isRepresentableAsText := fInfo.st.IsRepresentableAsText()
+ if !isRepresentableAsText {
+ // If we can't show plain text, always try to render.
+ isDisplayingSource = false
+ isDisplayingRendered = true
+ }
+ ctx.Data["IsLFSFile"] = fInfo.isLFSFile
+ ctx.Data["FileSize"] = fInfo.fileSize
+ ctx.Data["IsTextFile"] = fInfo.isTextFile
+ ctx.Data["IsRepresentableAsText"] = isRepresentableAsText
+ ctx.Data["IsDisplayingSource"] = isDisplayingSource
+ ctx.Data["IsDisplayingRendered"] = isDisplayingRendered
+ ctx.Data["IsExecutable"] = entry.IsExecutable()
+
+ isTextSource := fInfo.isTextFile || isDisplayingSource
+ ctx.Data["IsTextSource"] = isTextSource
+ if isTextSource {
+ ctx.Data["CanCopyContent"] = true
+ }
+
+ // Check LFS Lock
+ lfsLock, err := git_model.GetTreePathLock(ctx, ctx.Repo.Repository.ID, ctx.Repo.TreePath)
+ ctx.Data["LFSLock"] = lfsLock
+ if err != nil {
+ ctx.ServerError("GetTreePathLock", err)
+ return
+ }
+ if lfsLock != nil {
+ u, err := user_model.GetUserByID(ctx, lfsLock.OwnerID)
+ if err != nil {
+ ctx.ServerError("GetTreePathLock", err)
+ return
+ }
+ ctx.Data["LFSLockOwner"] = u.Name
+ ctx.Data["LFSLockOwnerHomeLink"] = u.HomeLink()
+ ctx.Data["LFSLockHint"] = ctx.Tr("repo.editor.this_file_locked")
+ }
+
+ // Assume file is not editable first.
+ if fInfo.isLFSFile {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_lfs_files")
+ } else if !isRepresentableAsText {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.cannot_edit_non_text_files")
+ }
+
+ switch {
+ case isRepresentableAsText:
+ if fInfo.st.IsSvgImage() {
+ ctx.Data["IsImageFile"] = true
+ ctx.Data["CanCopyContent"] = true
+ ctx.Data["HasSourceRenderedToggle"] = true
+ }
+
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ rd := charset.ToUTF8WithFallbackReader(io.MultiReader(bytes.NewReader(buf), dataRc), charset.ConvertOpts{})
+
+ shouldRenderSource := ctx.FormString("display") == "source"
+ readmeExist := util.IsReadmeFileName(blob.Name())
+ ctx.Data["ReadmeExist"] = readmeExist
+
+ markupType := markup.Type(blob.Name())
+ // If the markup type was detected by the custom markup renderer (rather than by markup.Type
+ // from the file name), it must not be reset later on, so that it is passed down to the render context.
+ detected := false
+ if markupType == "" {
+ detected = true
+ markupType = markup.DetectRendererType(blob.Name(), bytes.NewReader(buf))
+ }
+ if markupType != "" {
+ ctx.Data["HasSourceRenderedToggle"] = true
+ }
+
+ if markupType != "" && !shouldRenderSource {
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+ if !detected {
+ markupType = ""
+ }
+ metas := ctx.Repo.Repository.ComposeDocumentMetas(ctx)
+ metas["BranchNameSubURL"] = ctx.Repo.BranchNameSubURL()
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ Type: markupType,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: metas,
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ ctx.ServerError("Render", err)
+ return
+ }
+ // prevent iframes from loading third-party URLs
+ ctx.Resp.Header().Add("Content-Security-Policy", "frame-src 'self'")
+ } else {
+ buf, _ := io.ReadAll(rd)
+
+ // The Open Group Base Specification: https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap03.html
+ // empty: 0 lines; "a": 1 incomplete-line; "a\n": 1 line; "a\nb": 1 line, 1 incomplete-line;
+ // Forgejo uses the definition (like most modern editors):
+ // empty: 0 lines; "a": 1 line; "a\n": 1 line; "a\nb": 2 lines;
+ // When rendering, the last empty line is not rendered in the UI and isn't counted towards the number of lines.
+ // To tell users that the file does not contain a trailing EOL, text with a tooltip is displayed in the file header.
+ // Trailing EOL is only considered if the file has content.
+ // This NumLines is only used for the display on the UI: "xxx lines"
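+ // For example, "a\nb" yields NumLines=2 with HasNoTrailingEOL=true (the file header shows the missing-EOL hint),
+ // while "a\n" yields NumLines=1 with HasNoTrailingEOL=false.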
+ if len(buf) == 0 {
+ ctx.Data["NumLines"] = 0
+ } else {
+ hasNoTrailingEOL := !bytes.HasSuffix(buf, []byte{'\n'})
+ ctx.Data["HasNoTrailingEOL"] = hasNoTrailingEOL
+
+ numLines := bytes.Count(buf, []byte{'\n'})
+ if hasNoTrailingEOL {
+ numLines++
+ }
+ ctx.Data["NumLines"] = numLines
+ }
+ ctx.Data["NumLinesSet"] = true
+
+ language, err := files_service.TryGetContentLanguage(ctx.Repo.GitRepo, ctx.Repo.CommitID, ctx.Repo.TreePath)
+ if err != nil {
+ log.Error("Unable to get file language for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
+ }
+
+ fileContent, lexerName, err := highlight.File(blob.Name(), language, buf)
+ ctx.Data["LexerName"] = lexerName
+ if err != nil {
+ log.Error("highlight.File failed, fallback to plain text: %v", err)
+ fileContent = highlight.PlainText(buf)
+ }
+ status := &charset.EscapeStatus{}
+ statuses := make([]*charset.EscapeStatus, len(fileContent))
+ for i, line := range fileContent {
+ statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale, charset.FileviewContext)
+ status = status.Or(statuses[i])
+ }
+ ctx.Data["EscapeStatus"] = status
+ ctx.Data["FileContent"] = fileContent
+ ctx.Data["LineEscapeStatus"] = statuses
+ }
+ if !fInfo.isLFSFile {
+ if ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
+ ctx.Data["CanEditFile"] = false
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
+ } else {
+ ctx.Data["CanEditFile"] = true
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.edit_this_file")
+ }
+ } else if !ctx.Repo.IsViewBranch {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
+ } else if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ ctx.Data["EditFileTooltip"] = ctx.Tr("repo.editor.fork_before_edit")
+ }
+ }
+
+ case fInfo.st.IsPDF():
+ ctx.Data["IsPDFFile"] = true
+ case fInfo.st.IsVideo():
+ ctx.Data["IsVideoFile"] = true
+ case fInfo.st.IsAudio():
+ ctx.Data["IsAudioFile"] = true
+ case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
+ ctx.Data["IsImageFile"] = true
+ ctx.Data["CanCopyContent"] = true
+ default:
+ if fInfo.fileSize >= setting.UI.MaxDisplayFileSize {
+ ctx.Data["IsFileTooLarge"] = true
+ break
+ }
+
+ if markupType := markup.Type(blob.Name()); markupType != "" {
+ rd := io.MultiReader(bytes.NewReader(buf), dataRc)
+ ctx.Data["IsMarkup"] = true
+ ctx.Data["MarkupType"] = markupType
+ ctx.Data["EscapeStatus"], ctx.Data["FileContent"], err = markupRender(ctx, &markup.RenderContext{
+ Ctx: ctx,
+ RelativePath: ctx.Repo.TreePath,
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: ctx.Repo.BranchNameSubURL(),
+ TreePath: path.Dir(ctx.Repo.TreePath),
+ },
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ }, rd)
+ if err != nil {
+ ctx.ServerError("Render", err)
+ return
+ }
+ }
+ }
+
+ if ctx.Repo.GitRepo != nil {
+ attrs, err := ctx.Repo.GitRepo.GitAttributes(ctx.Repo.CommitID, ctx.Repo.TreePath, "linguist-vendored", "linguist-generated")
+ if err != nil {
+ log.Error("GitAttributes(%s, %s) failed: %v", ctx.Repo.CommitID, ctx.Repo.TreePath, err)
+ } else {
+ ctx.Data["IsVendored"] = attrs["linguist-vendored"].Bool().Value()
+ ctx.Data["IsGenerated"] = attrs["linguist-generated"].Bool().Value()
+ }
+ }
+
+ if fInfo.st.IsImage() && !fInfo.st.IsSvgImage() {
+ img, _, err := image.DecodeConfig(bytes.NewReader(buf))
+ if err == nil {
+ // There are image formats Go can't decode.
+ // Instead of throwing an error in that case, we only show the size when decoding succeeds.
+ ctx.Data["ImageSize"] = fmt.Sprintf("%dx%dpx", img.Width, img.Height)
+ }
+ }
+
+ if ctx.Repo.CanEnableEditor(ctx, ctx.Doer) {
+ if lfsLock != nil && lfsLock.OwnerID != ctx.Doer.ID {
+ ctx.Data["CanDeleteFile"] = false
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.this_file_locked")
+ } else {
+ ctx.Data["CanDeleteFile"] = true
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.delete_this_file")
+ }
+ } else if !ctx.Repo.IsViewBranch {
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_be_on_a_branch")
+ } else if !ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ ctx.Data["DeleteFileTooltip"] = ctx.Tr("repo.editor.must_have_write_access")
+ }
+}
+
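+// markupRender renders the given input with the markup renderer while a goroutine
+// concurrently escapes control characters from the rendered HTML (NBSP is allowed
+// since it is rendered); the escaped result is returned once both sides are done.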
+func markupRender(ctx *context.Context, renderCtx *markup.RenderContext, input io.Reader) (escaped *charset.EscapeStatus, output template.HTML, err error) {
+ markupRd, markupWr := io.Pipe()
+ defer markupWr.Close()
+ done := make(chan struct{})
+ go func() {
+ sb := &strings.Builder{}
+ // We allow NBSP here because it is rendered
+ escaped, _ = charset.EscapeControlReader(markupRd, sb, ctx.Locale, charset.FileviewContext, charset.RuneNBSP)
+ output = template.HTML(sb.String())
+ close(done)
+ }()
+ err = markup.Render(renderCtx, input, markupWr)
+ _ = markupWr.CloseWithError(err)
+ <-done
+ return escaped, output, err
+}
+
+func checkHomeCodeViewable(ctx *context.Context) {
+ if len(ctx.Repo.Units) > 0 {
+ if ctx.Repo.Repository.IsBeingCreated() {
+ task, err := admin_model.GetMigratingTask(ctx, ctx.Repo.Repository.ID)
+ if err != nil {
+ if admin_model.IsErrTaskDoesNotExist(err) {
+ ctx.Data["Repo"] = ctx.Repo
+ ctx.Data["CloneAddr"] = ""
+ ctx.Data["Failed"] = true
+ ctx.HTML(http.StatusOK, tplMigrating)
+ return
+ }
+ ctx.ServerError("models.GetMigratingTask", err)
+ return
+ }
+ cfg, err := task.MigrateConfig()
+ if err != nil {
+ ctx.ServerError("task.MigrateConfig", err)
+ return
+ }
+
+ ctx.Data["Repo"] = ctx.Repo
+ ctx.Data["MigrateTask"] = task
+ ctx.Data["CloneAddr"], _ = util.SanitizeURL(cfg.CloneAddr)
+ ctx.Data["Failed"] = task.Status == structs.TaskStatusFailed
+ ctx.HTML(http.StatusOK, tplMigrating)
+ return
+ }
+
+ if ctx.IsSigned {
+ // Set repo notification-status read if unread
+ if err := activities_model.SetRepoReadBy(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("ReadBy", err)
+ return
+ }
+ }
+
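+ // If the code unit is readable the home page can be shown as-is; otherwise fall back to
+ // redirecting to the best-ranked unit (per unit ordering) that can serve as the default landing page.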
+ var firstUnit *unit_model.Unit
+ for _, repoUnit := range ctx.Repo.Units {
+ if repoUnit.Type == unit_model.TypeCode {
+ return
+ }
+
+ unit, ok := unit_model.Units[repoUnit.Type]
+ if ok && (firstUnit == nil || !firstUnit.IsLessThan(unit)) && repoUnit.Type.CanBeDefault() {
+ firstUnit = &unit
+ }
+ }
+
+ if firstUnit != nil {
+ ctx.Redirect(ctx.Repo.Repository.Link() + firstUnit.URI)
+ return
+ }
+ }
+
+ ctx.NotFound("Home", errors.New(ctx.Locale.TrString("units.error.no_unit_allowed_repo")))
+}
+
+func checkCitationFile(ctx *context.Context, entry *git.TreeEntry) {
+ if entry.Name() != "" {
+ return
+ }
+ tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.SubTree", err)
+ return
+ }
+ allEntries, err := tree.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return
+ }
+ for _, entry := range allEntries {
+ if entry.Name() == "CITATION.cff" || entry.Name() == "CITATION.bib" {
+ // Read Citation file contents
+ if content, err := entry.Blob().GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("checkCitationFile: GetBlobContent: %v", err)
+ } else {
+ ctx.Data["CitationExist"] = true
+ ctx.Data["CitationFile"] = entry.Name()
+ ctx.PageData["citationFileContent"] = content
+ break
+ }
+ }
+ }
+}
+
+// Home renders the repository home page
+func Home(ctx *context.Context) {
+ if setting.Other.EnableFeed {
+ isFeed, _, showFeedType := feed.GetFeedType(ctx.Params(":reponame"), ctx.Req)
+ if isFeed {
+ if ctx.Link == fmt.Sprintf("%s.%s", ctx.Repo.RepoLink, showFeedType) {
+ feed.ShowRepoFeed(ctx, ctx.Repo.Repository, showFeedType)
+ return
+ }
+
+ if ctx.Repo.Repository.IsEmpty {
+ ctx.NotFound("MustBeNotEmpty", nil)
+ return
+ }
+
+ if ctx.Repo.TreePath == "" {
+ feed.ShowBranchFeed(ctx, ctx.Repo.Repository, showFeedType)
+ } else {
+ feed.ShowFileFeed(ctx, ctx.Repo.Repository, showFeedType)
+ }
+ return
+ }
+ }
+
+ checkHomeCodeViewable(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ renderHomeCode(ctx)
+}
+
+// LastCommit returns last-commit data for the provided branch/tag/commit and directory (in the URL) and the filenames in the request body
+func LastCommit(ctx *context.Context) {
+ checkHomeCodeViewable(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ renderDirectoryFiles(ctx, 0)
+ if ctx.Written() {
+ return
+ }
+
+ var treeNames []string
+ paths := make([]string, 0, 5)
+ if len(ctx.Repo.TreePath) > 0 {
+ treeNames = strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ ctx.Data["HasParentPath"] = true
+ if len(paths)-2 >= 0 {
+ ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
+ }
+ }
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ ctx.Data["BranchLink"] = branchLink
+
+ ctx.HTML(http.StatusOK, tplRepoViewList)
+}
+
+func renderDirectoryFiles(ctx *context.Context, timeout time.Duration) git.Entries {
+ tree, err := ctx.Repo.Commit.SubTree(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.SubTree", err)
+ return nil
+ }
+
+ ctx.Data["LastCommitLoaderURL"] = ctx.Repo.RepoLink + "/lastcommit/" + url.PathEscape(ctx.Repo.CommitID) + "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+
+ // Get the entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return nil
+ }
+
+ if !entry.IsDir() {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return nil
+ }
+
+ allEntries, err := tree.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return nil
+ }
+ allEntries.CustomSort(base.NaturalSortLess)
+
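+ // Bound the potentially slow last-commit lookup by the given timeout;
+ // a timeout of 0 (as passed by LastCommit) means no limit.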
+ commitInfoCtx := gocontext.Context(ctx)
+ if timeout > 0 {
+ var cancel gocontext.CancelFunc
+ commitInfoCtx, cancel = gocontext.WithTimeout(ctx, timeout)
+ defer cancel()
+ }
+
+ files, latestCommit, err := allEntries.GetCommitsInfo(commitInfoCtx, ctx.Repo.Commit, ctx.Repo.TreePath)
+ if err != nil {
+ ctx.ServerError("GetCommitsInfo", err)
+ return nil
+ }
+ ctx.Data["Files"] = files
+ for _, f := range files {
+ if f.Commit == nil {
+ ctx.Data["HasFilesWithoutLatestCommit"] = true
+ break
+ }
+ }
+
+ if !loadLatestCommitData(ctx, latestCommit) {
+ return nil
+ }
+
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treeLink := branchLink
+
+ if len(ctx.Repo.TreePath) > 0 {
+ treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+
+ ctx.Data["TreeLink"] = treeLink
+ ctx.Data["SSHDomain"] = setting.SSH.Domain
+
+ return allEntries
+}
+
+func renderLanguageStats(ctx *context.Context) {
+ langs, err := repo_model.GetTopLanguageStats(ctx, ctx.Repo.Repository, 5)
+ if err != nil {
+ ctx.ServerError("Repo.GetTopLanguageStats", err)
+ return
+ }
+
+ ctx.Data["LanguageStats"] = langs
+}
+
+func renderRepoTopics(ctx *context.Context) {
+ topics, _, err := repo_model.FindTopics(ctx, &repo_model.FindTopicOptions{
+ RepoID: ctx.Repo.Repository.ID,
+ })
+ if err != nil {
+ ctx.ServerError("models.FindTopics", err)
+ return
+ }
+ ctx.Data["Topics"] = topics
+}
+
+func prepareOpenWithEditorApps(ctx *context.Context) {
+ var tmplApps []map[string]any
+ apps := setting.Config().Repository.OpenWithEditorApps.Value(ctx)
+ if len(apps) == 0 {
+ apps = setting.DefaultOpenWithEditorApps()
+ }
+ for _, app := range apps {
+ schema, _, _ := strings.Cut(app.OpenURL, ":")
+ var iconHTML template.HTML
+ if schema == "vscode" || schema == "vscodium" || schema == "jetbrains" {
+ iconHTML = svg.RenderHTML(fmt.Sprintf("gitea-open-with-%s", schema), 16, "tw-mr-2")
+ } else {
+ iconHTML = svg.RenderHTML("gitea-git", 16, "tw-mr-2") // TODO: it could support user's customized icon in the future
+ }
+ tmplApps = append(tmplApps, map[string]any{
+ "DisplayName": app.DisplayName,
+ "OpenURL": app.OpenURL,
+ "IconHTML": iconHTML,
+ })
+ }
+ ctx.Data["OpenWithEditorApps"] = tmplApps
+}
+
+func renderHomeCode(ctx *context.Context) {
+ ctx.Data["PageIsViewCode"] = true
+ ctx.Data["RepositoryUploadEnabled"] = setting.Repository.Upload.Enabled
+ prepareOpenWithEditorApps(ctx)
+
+ if ctx.Repo.Commit == nil || ctx.Repo.Repository.IsEmpty || ctx.Repo.Repository.IsBroken() {
+ showEmpty := true
+ var err error
+ if ctx.Repo.GitRepo != nil {
+ showEmpty, err = ctx.Repo.GitRepo.IsEmpty()
+ if err != nil {
+ log.Error("GitRepo.IsEmpty: %v", err)
+ ctx.Repo.Repository.Status = repo_model.RepositoryBroken
+ showEmpty = true
+ ctx.Flash.Error(ctx.Tr("error.occurred"), true)
+ }
+ }
+ if showEmpty {
+ ctx.HTML(http.StatusOK, tplRepoEMPTY)
+ return
+ }
+
+ // the repo is not really empty, so we should update the model in the database
+ // such a problem may be caused by:
+ // 1) an error occurred during pushing/receiving; 2) the user replaced an empty git repo manually
+ // and even more: the IsEmpty flag is deeply broken and should be removed, with the UI changed to cope with empty repos.
+ // it's possible for a repository to be non-empty by that flag but still 500,
+ // because there are no branches - only tags - or the default branch is non-existent as it has been 0-pushed.
+ ctx.Repo.Repository.IsEmpty = false
+ if err = repo_model.UpdateRepositoryCols(ctx, ctx.Repo.Repository, "is_empty"); err != nil {
+ ctx.ServerError("UpdateRepositoryCols", err)
+ return
+ }
+ if err = repo_module.UpdateRepoSize(ctx, ctx.Repo.Repository); err != nil {
+ ctx.ServerError("UpdateRepoSize", err)
+ return
+ }
+
+ // the repo's IsEmpty has been updated, redirect to this page to make sure middlewares can get the correct values
+ link := ctx.Link
+ if ctx.Req.URL.RawQuery != "" {
+ link += "?" + ctx.Req.URL.RawQuery
+ }
+ ctx.Redirect(link)
+ return
+ }
+
+ title := ctx.Repo.Repository.Owner.Name + "/" + ctx.Repo.Repository.Name
+ if len(ctx.Repo.Repository.Description) > 0 {
+ title += ": " + ctx.Repo.Repository.Description
+ }
+ ctx.Data["Title"] = title
+
+ // Get Topics of this repo
+ renderRepoTopics(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ // Get the entry the user is currently looking at.
+ entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+ if err != nil {
+ HandleGitError(ctx, "Repo.Commit.GetTreeEntryByPath", err)
+ return
+ }
+
+ checkOutdatedBranch(ctx)
+
+ checkCitationFile(ctx, entry)
+ if ctx.Written() {
+ return
+ }
+
+ renderLanguageStats(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ if entry.IsDir() {
+ renderDirectory(ctx)
+ } else {
+ renderFile(ctx, entry)
+ }
+ if ctx.Written() {
+ return
+ }
+
+ if ctx.Doer != nil {
+ if err := ctx.Repo.Repository.GetBaseRepo(ctx); err != nil {
+ ctx.ServerError("GetBaseRepo", err)
+ return
+ }
+
+ // If the repo is a mirror, don't display recently pushed branches.
+ if ctx.Repo.Repository.IsMirror {
+ goto PostRecentBranchCheck
+ }
+
+ // If pull requests aren't enabled for either the current repo, or its
+ // base, don't display recently pushed branches.
+ if !(ctx.Repo.Repository.AllowsPulls(ctx) ||
+ (ctx.Repo.Repository.BaseRepo != nil && ctx.Repo.Repository.BaseRepo.AllowsPulls(ctx))) {
+ goto PostRecentBranchCheck
+ }
+
+ // Find recently pushed new branches to *this* repo.
+ branches, err := git_model.FindRecentlyPushedNewBranches(ctx, ctx.Repo.Repository.ID, ctx.Doer.ID, ctx.Repo.Repository.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("FindRecentlyPushedBranches", err)
+ return
+ }
+
+ // If this is not a fork, check if the signed in user has a fork, and
+ // check branches there.
+ if !ctx.Repo.Repository.IsFork {
+ repo := repo_model.GetForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID)
+ if repo != nil {
+ baseBranches, err := git_model.FindRecentlyPushedNewBranches(ctx, repo.ID, ctx.Doer.ID, repo.DefaultBranch)
+ if err != nil {
+ ctx.ServerError("FindRecentlyPushedBranches", err)
+ return
+ }
+ branches = append(branches, baseBranches...)
+ }
+ }
+
+ // Filter out branches that have no relation to the default branch of
+ // the repository.
+ var filteredBranches []*git_model.Branch
+ for _, branch := range branches {
+ repo, err := branch.GetRepo(ctx)
+ if err != nil {
+ continue
+ }
+ gitRepo, err := git.OpenRepository(ctx, repo.RepoPath())
+ if err != nil {
+ continue
+ }
+ defer gitRepo.Close()
+ head, err := gitRepo.GetCommit(branch.CommitID)
+ if err != nil {
+ continue
+ }
+ defaultBranch, err := gitrepo.GetDefaultBranch(ctx, repo)
+ if err != nil {
+ continue
+ }
+ defaultBranchHead, err := gitRepo.GetCommit(defaultBranch)
+ if err != nil {
+ continue
+ }
+
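+ // HasPreviousCommit reports whether the default branch head is an ancestor of the
+ // pushed branch head, i.e. the branch shares history with the default branch.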
+ hasMergeBase, err := head.HasPreviousCommit(defaultBranchHead.ID)
+ if err != nil {
+ continue
+ }
+
+ if hasMergeBase {
+ filteredBranches = append(filteredBranches, branch)
+ }
+ }
+
+ ctx.Data["RecentlyPushedNewBranches"] = filteredBranches
+ }
+
+PostRecentBranchCheck:
+ var treeNames []string
+ paths := make([]string, 0, 5)
+ if len(ctx.Repo.TreePath) > 0 {
+ treeNames = strings.Split(ctx.Repo.TreePath, "/")
+ for i := range treeNames {
+ paths = append(paths, strings.Join(treeNames[:i+1], "/"))
+ }
+
+ ctx.Data["HasParentPath"] = true
+ if len(paths)-2 >= 0 {
+ ctx.Data["ParentPath"] = "/" + paths[len(paths)-2]
+ }
+ }
+
+ ctx.Data["Paths"] = paths
+
+ branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
+ treeLink := branchLink
+ if len(ctx.Repo.TreePath) > 0 {
+ treeLink += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+ }
+ ctx.Data["TreeLink"] = treeLink
+ ctx.Data["TreeNames"] = treeNames
+ ctx.Data["BranchLink"] = branchLink
+ ctx.Data["CodeIndexerDisabled"] = !setting.Indexer.RepoIndexerEnabled
+ ctx.HTML(http.StatusOK, tplRepoHome)
+}
+
+func checkOutdatedBranch(ctx *context.Context) {
+ if !(ctx.Repo.IsAdmin() || ctx.Repo.IsOwner()) {
+ return
+ }
+
+ // get the head commit of the branch since ctx.Repo.CommitID is not always the head commit of `ctx.Repo.BranchName`
+ commit, err := ctx.Repo.GitRepo.GetBranchCommit(ctx.Repo.BranchName)
+ if err != nil {
+ log.Error("GetBranchCommitID: %v", err)
+ // Don't return an error page, as it can be rechecked the next time the user opens the page.
+ return
+ }
+
+ dbBranch, err := git_model.GetBranch(ctx, ctx.Repo.Repository.ID, ctx.Repo.BranchName)
+ if err != nil {
+ log.Error("GetBranch: %v", err)
+ // Don't return an error page, as it can be rechecked the next time the user opens the page.
+ return
+ }
+
+ if dbBranch.CommitID != commit.ID.String() {
+ ctx.Flash.Warning(ctx.Tr("repo.error.broken_git_hook", "https://docs.gitea.com/help/faq#push-hook--webhook--actions-arent-running"), true)
+ }
+}
+
+// RenderUserCards renders a page that shows users according to the given template
+func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+ pager := context.NewPagination(total, setting.MaxUserCardsPerPage, page, 5)
+ ctx.Data["Page"] = pager
+
+ items, err := getter(db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.MaxUserCardsPerPage,
+ })
+ if err != nil {
+ ctx.ServerError("getter", err)
+ return
+ }
+ ctx.Data["Cards"] = items
+
+ ctx.HTML(http.StatusOK, tpl)
+}
+
+// Watchers renders the repository's watchers
+func Watchers(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.watchers")
+ ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
+ ctx.Data["PageIsWatchers"] = true
+
+ RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, func(opts db.ListOptions) ([]*user_model.User, error) {
+ return repo_model.GetRepoWatchers(ctx, ctx.Repo.Repository.ID, opts)
+ }, tplWatchers)
+}
+
+// Stars renders the users who starred the repository
+func Stars(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.stargazers")
+ ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
+ ctx.Data["PageIsStargazers"] = true
+ RenderUserCards(ctx, ctx.Repo.Repository.NumStars, func(opts db.ListOptions) ([]*user_model.User, error) {
+ return repo_model.GetStargazers(ctx, ctx.Repo.Repository, opts)
+ }, tplWatchers)
+}
+
+// Forks renders the repository's forks
+func Forks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.forks")
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ pager := context.NewPagination(ctx.Repo.Repository.NumForks, setting.MaxForksPerPage, page, 5)
+ ctx.Data["Page"] = pager
+
+ forks, err := repo_model.GetForks(ctx, ctx.Repo.Repository, db.ListOptions{
+ Page: pager.Paginater.Current(),
+ PageSize: setting.MaxForksPerPage,
+ })
+ if err != nil {
+ ctx.ServerError("GetForks", err)
+ return
+ }
+
+ for _, fork := range forks {
+ if err = fork.LoadOwner(ctx); err != nil {
+ ctx.ServerError("LoadOwner", err)
+ return
+ }
+ }
+
+ ctx.Data["Forks"] = forks
+
+ ctx.HTML(http.StatusOK, tplForks)
+}
diff --git a/routers/web/repo/view_test.go b/routers/web/repo/view_test.go
new file mode 100644
index 0000000..73ba118
--- /dev/null
+++ b/routers/web/repo/view_test.go
@@ -0,0 +1,62 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "reflect"
+ "testing"
+)
+
+func Test_localizedExtensions(t *testing.T) {
+ tests := []struct {
+ name string
+ ext string
+ languageCode string
+ wantLocalizedExts []string
+ }{
+ {
+ name: "empty language",
+ ext: ".md",
+ wantLocalizedExts: []string{".md"},
+ },
+ {
+ name: "No region - lowercase",
+ languageCode: "en",
+ ext: ".csv",
+ wantLocalizedExts: []string{".en.csv", ".csv"},
+ },
+ {
+ name: "No region - uppercase",
+ languageCode: "FR",
+ ext: ".txt",
+ wantLocalizedExts: []string{".fr.txt", ".txt"},
+ },
+ {
+ name: "With region - lowercase",
+ languageCode: "en-us",
+ ext: ".md",
+ wantLocalizedExts: []string{".en-us.md", ".en_us.md", ".en.md", "_en.md", ".md"},
+ },
+ {
+ name: "With region - uppercase",
+ languageCode: "en-CA",
+ ext: ".MD",
+ wantLocalizedExts: []string{".en-ca.MD", ".en_ca.MD", ".en.MD", "_en.MD", ".MD"},
+ },
+ {
+ name: "With region - all uppercase",
+ languageCode: "ZH-TW",
+ ext: ".md",
+ wantLocalizedExts: []string{".zh-tw.md", ".zh_tw.md", ".zh.md", "_zh.md", ".md"},
+ },
+ }
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ if gotLocalizedExts := localizedExtensions(tt.ext, tt.languageCode); !reflect.DeepEqual(gotLocalizedExts, tt.wantLocalizedExts) {
+ t.Errorf("localizedExtensions() = %v, want %v", gotLocalizedExts, tt.wantLocalizedExts)
+ }
+ })
+ }
+}
diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go
new file mode 100644
index 0000000..1fd0800
--- /dev/null
+++ b/routers/web/repo/wiki.go
@@ -0,0 +1,816 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "net/http"
+ "net/url"
+ "path/filepath"
+ "strings"
+
+ git_model "code.gitea.io/gitea/models/git"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/charset"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
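+ // Work out whether the signature can be trusted under the repository's configured trust model;
+ // the callback checks whether the signer is an owner, member or collaborator of the repository.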
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ notify_service "code.gitea.io/gitea/services/notify"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+)
+
+const (
+ tplWikiStart base.TplName = "repo/wiki/start"
+ tplWikiView base.TplName = "repo/wiki/view"
+ tplWikiRevision base.TplName = "repo/wiki/revision"
+ tplWikiNew base.TplName = "repo/wiki/new"
+ tplWikiPages base.TplName = "repo/wiki/pages"
+ tplWikiSearch base.TplName = "repo/wiki/search"
+)
+
+// MustEnableWiki checks if the wiki is enabled; if an external wiki is configured, it redirects there
+func MustEnableWiki(ctx *context.Context) {
+ if !ctx.Repo.CanRead(unit.TypeWiki) &&
+ !ctx.Repo.CanRead(unit.TypeExternalWiki) {
+ if log.IsTrace() {
+ log.Trace("Permission Denied: User %-v cannot read %-v or %-v of repo %-v\n"+
+ "User in repo has Permissions: %-+v",
+ ctx.Doer,
+ unit.TypeWiki,
+ unit.TypeExternalWiki,
+ ctx.Repo.Repository,
+ ctx.Repo.Permission)
+ }
+ ctx.NotFound("MustEnableWiki", nil)
+ return
+ }
+
+ unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalWiki)
+ if err == nil {
+ ctx.Redirect(unit.ExternalWikiConfig().ExternalWikiURL)
+ return
+ }
+}
+
+// PageMeta wiki page meta information
+type PageMeta struct {
+ Name string
+ SubURL string
+ GitEntryName string
+ UpdatedUnix timeutil.TimeStamp
+}
+
+// findEntryForFile finds the tree entry for a target filepath.
+func findEntryForFile(commit *git.Commit, target string) (*git.TreeEntry, error) {
+ entry, err := commit.GetTreeEntryByPath(target)
+ if err != nil && !git.IsErrNotExist(err) {
+ return nil, err
+ }
+ if entry != nil {
+ return entry, nil
+ }
+
+ // Then try the unescaped name, the shortest alternative
+ var unescapedTarget string
+ if unescapedTarget, err = url.QueryUnescape(target); err != nil {
+ return nil, err
+ }
+ return commit.GetTreeEntryByPath(unescapedTarget)
+}
+
+func findWikiRepoCommit(ctx *context.Context) (*git.Repository, *git.Commit, error) {
+ wikiRepo, err := gitrepo.OpenWikiRepository(ctx, ctx.Repo.Repository)
+ if err != nil {
+ ctx.ServerError("OpenRepository", err)
+ return nil, nil, err
+ }
+
+ commit, err := wikiRepo.GetBranchCommit(ctx.Repo.Repository.GetWikiBranchName())
+ if err != nil {
+ return wikiRepo, nil, err
+ }
+ return wikiRepo, commit, nil
+}
+
+// wikiContentsByEntry returns the contents of the wiki page referenced by the
+// given tree entry. Writes to ctx if an error occurs.
+func wikiContentsByEntry(ctx *context.Context, entry *git.TreeEntry) []byte {
+ reader, err := entry.Blob().DataAsync()
+ if err != nil {
+ ctx.ServerError("Blob.Data", err)
+ return nil
+ }
+ defer reader.Close()
+ content, err := io.ReadAll(reader)
+ if err != nil {
+ ctx.ServerError("ReadAll", err)
+ return nil
+ }
+ return content
+}
+
+// wikiContentsByName returns the contents of a wiki page, along with a boolean
+// indicating whether the page is missing. Writes to ctx if an error occurs.
+func wikiContentsByName(ctx *context.Context, commit *git.Commit, wikiName wiki_service.WebPath) ([]byte, *git.TreeEntry, string, bool) {
+ gitFilename := wiki_service.WebPathToGitPath(wikiName)
+ entry, err := findEntryForFile(commit, gitFilename)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findEntryForFile", err)
+ return nil, nil, "", false
+ } else if entry == nil {
+ return nil, nil, "", true
+ }
+ return wikiContentsByEntry(ctx, entry), entry, gitFilename, false
+}
+
+func renderViewPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return nil, nil
+ }
+
+ // Get page list.
+ entries, err := commit.ListEntries()
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("ListEntries", err)
+ return nil, nil
+ }
+ pages := make([]PageMeta, 0, len(entries))
+ for _, entry := range entries {
+ if !entry.IsRegular() {
+ continue
+ }
+ wikiName, err := wiki_service.GitPathToWebPath(entry.Name())
+ if err != nil {
+ if repo_model.IsErrWikiInvalidFileName(err) {
+ continue
+ }
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("WikiFilenameToName", err)
+ return nil, nil
+ } else if wikiName == "_Sidebar" || wikiName == "_Footer" {
+ continue
+ }
+ _, displayName := wiki_service.WebPathToUserTitle(wikiName)
+ pages = append(pages, PageMeta{
+ Name: displayName,
+ SubURL: wiki_service.WebPathToURLPath(wikiName),
+ GitEntryName: entry.Name(),
+ })
+ }
+ ctx.Data["Pages"] = pages
+
+ // get requested page name
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+ isSideBar := pageName == "_Sidebar"
+ isFooter := pageName == "_Footer"
+
+ // look up the filename in the wiki - get file content, git tree entry, real filename
+ data, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+
+ var sidebarContent []byte
+ if !isSideBar {
+ sidebarContent, _, _, _ = wikiContentsByName(ctx, commit, "_Sidebar")
+ if ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+ } else {
+ sidebarContent = data
+ }
+
+ var footerContent []byte
+ if !isFooter {
+ footerContent, _, _, _ = wikiContentsByName(ctx, commit, "_Footer")
+ if ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+ } else {
+ footerContent = data
+ }
+
+ rctx := &markup.RenderContext{
+ Ctx: ctx,
+ Metas: ctx.Repo.Repository.ComposeDocumentMetas(ctx),
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ },
+ IsWiki: true,
+ }
+ buf := &strings.Builder{}
+
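+ // renderFn renders wiki markdown through a pipe: markdown.Render writes into it while a
+ // goroutine escapes control characters (NBSP allowed) from the other end into buf.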
+ renderFn := func(data []byte) (escaped *charset.EscapeStatus, output string, err error) {
+ markupRd, markupWr := io.Pipe()
+ defer markupWr.Close()
+ done := make(chan struct{})
+ go func() {
+ // We allow NBSP here because it is rendered
+ escaped, _ = charset.EscapeControlReader(markupRd, buf, ctx.Locale, charset.WikiContext, charset.RuneNBSP)
+ output = buf.String()
+ buf.Reset()
+ close(done)
+ }()
+
+ err = markdown.Render(rctx, bytes.NewReader(data), markupWr)
+ _ = markupWr.CloseWithError(err)
+ <-done
+ return escaped, output, err
+ }
+
+ ctx.Data["EscapeStatus"], ctx.Data["content"], err = renderFn(data)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+
+ if rctx.SidebarTocNode != nil {
+ sb := &strings.Builder{}
+ err = markdown.SpecializedMarkdown().Renderer().Render(sb, nil, rctx.SidebarTocNode)
+ if err != nil {
+ log.Error("Failed to render wiki sidebar TOC: %v", err)
+ } else {
+ ctx.Data["sidebarTocContent"] = sb.String()
+ }
+ }
+
+ if !isSideBar {
+ buf.Reset()
+ ctx.Data["sidebarEscapeStatus"], ctx.Data["sidebarContent"], err = renderFn(sidebarContent)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+ ctx.Data["sidebarPresent"] = sidebarContent != nil
+ } else {
+ ctx.Data["sidebarPresent"] = false
+ }
+
+ if !isFooter {
+ buf.Reset()
+ ctx.Data["footerEscapeStatus"], ctx.Data["footerContent"], err = renderFn(footerContent)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("Render", err)
+ return nil, nil
+ }
+ ctx.Data["footerPresent"] = footerContent != nil
+ } else {
+ ctx.Data["footerPresent"] = false
+ }
+
+ // get commit count - wiki revisions
+ commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.GetWikiBranchName(), pageFilename)
+ ctx.Data["CommitCount"] = commitsCount
+
+ return wikiRepo, entry
+}
+
+func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return nil, nil
+ }
+
+ // get requested page name
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
+
+ // look up the filename in the wiki - get file content, git tree entry, real filename
+ data, entry, pageFilename, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return nil, nil
+ }
+
+ ctx.Data["content"] = string(data)
+ ctx.Data["sidebarPresent"] = false
+ ctx.Data["sidebarContent"] = ""
+ ctx.Data["footerPresent"] = false
+ ctx.Data["footerContent"] = ""
+
+ // get commit count - wiki revisions
+ commitsCount, _ := wikiRepo.FileCommitsCount(ctx.Repo.Repository.GetWikiBranchName(), pageFilename)
+ ctx.Data["CommitCount"] = commitsCount
+
+ // get page
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+
+ // get the commit history for the page
+ commitsHistory, err := wikiRepo.CommitsByFileAndRange(
+ git.CommitsByFileAndRangeOptions{
+ Revision: ctx.Repo.Repository.GetWikiBranchName(),
+ File: pageFilename,
+ Page: page,
+ })
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ ctx.ServerError("CommitsByFileAndRange", err)
+ return nil, nil
+ }
+ ctx.Data["Commits"] = git_model.ConvertFromGitCommit(ctx, commitsHistory, ctx.Repo.Repository)
+
+ pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParamString("action", "_revision")
+ ctx.Data["Page"] = pager
+
+ return wikiRepo, entry
+}
+
+func renderEditPage(ctx *context.Context) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ if !git.IsErrNotExist(err) {
+ ctx.ServerError("GetBranchCommit", err)
+ }
+ return
+ }
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ // get requested page name
+ pageName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(pageName) == 0 {
+ pageName = "Home"
+ }
+
+ _, displayName := wiki_service.WebPathToUserTitle(pageName)
+ ctx.Data["PageURL"] = wiki_service.WebPathToURLPath(pageName)
+ ctx.Data["old_title"] = displayName
+ ctx.Data["Title"] = displayName
+ ctx.Data["title"] = displayName
+
+ // look up the filename in the wiki - get file content, git tree entry, real filename
+ data, entry, _, noEntry := wikiContentsByName(ctx, commit, pageName)
+ if noEntry {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/?action=_pages")
+ }
+ if entry == nil || ctx.Written() {
+ return
+ }
+
+ ctx.Data["content"] = string(data)
+ ctx.Data["sidebarPresent"] = false
+ ctx.Data["sidebarContent"] = ""
+ ctx.Data["footerPresent"] = false
+ ctx.Data["footerContent"] = ""
+}
+
+// WikiPost handles POST requests for wiki pages (create, edit, delete)
+func WikiPost(ctx *context.Context) {
+ switch ctx.FormString("action") {
+ case "_new":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ NewWikiPost(ctx)
+ return
+ case "_delete":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ DeleteWikiPagePost(ctx)
+ return
+ }
+
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ EditWikiPost(ctx)
+}
+
+// Wiki renders a single wiki page
+func Wiki(ctx *context.Context) {
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ switch ctx.FormString("action") {
+ case "_pages":
+ WikiPages(ctx)
+ return
+ case "_revision":
+ WikiRevision(ctx)
+ return
+ case "_edit":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ EditWiki(ctx)
+ return
+ case "_new":
+ if !ctx.Repo.CanWrite(unit.TypeWiki) {
+ ctx.NotFound(ctx.Req.URL.RequestURI(), nil)
+ return
+ }
+ NewWiki(ctx)
+ return
+ }
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiRepo, entry := renderViewPage(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+ if ctx.Written() {
+ return
+ }
+ if entry == nil {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiPath := entry.Name()
+ if markup.Type(wikiPath) != markdown.MarkupName {
+ ext := strings.ToUpper(filepath.Ext(wikiPath))
+ ctx.Data["FormatWarning"] = fmt.Sprintf("%s rendering is not supported at the moment. Rendered as Markdown.", ext)
+ }
+ // Get last change information.
+ lastCommit, err := wikiRepo.GetCommitByPath(wikiPath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+ ctx.Data["Author"] = lastCommit.Author
+
+ ctx.HTML(http.StatusOK, tplWikiView)
+}
+
+// WikiRevision renders the revision list of a wiki page
+func WikiRevision(ctx *context.Context) {
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ wikiRepo, entry := renderRevisionPage(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ if ctx.Written() {
+ return
+ }
+ if entry == nil {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki")
+ ctx.HTML(http.StatusOK, tplWikiStart)
+ return
+ }
+
+ // Get last change information.
+ wikiPath := entry.Name()
+ lastCommit, err := wikiRepo.GetCommitByPath(wikiPath)
+ if err != nil {
+ ctx.ServerError("GetCommitByPath", err)
+ return
+ }
+ ctx.Data["Author"] = lastCommit.Author
+
+ ctx.HTML(http.StatusOK, tplWikiRevision)
+}
+
+// WikiPages renders the wiki pages list
+func WikiPages(ctx *context.Context) {
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki")
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.pages")
+ ctx.Data["CanWriteWiki"] = ctx.Repo.CanWrite(unit.TypeWiki) && !ctx.Repo.Repository.IsArchived
+
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ if err != nil {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ return
+ }
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ entries, err := commit.ListEntries()
+ if err != nil {
+ ctx.ServerError("ListEntries", err)
+ return
+ }
+ pages := make([]PageMeta, 0, len(entries))
+ for _, entry := range entries {
+ if !entry.IsRegular() {
+ continue
+ }
+ c, err := wikiRepo.GetCommitByPath(entry.Name())
+ if err != nil {
+ ctx.ServerError("GetCommit", err)
+ return
+ }
+ wikiName, err := wiki_service.GitPathToWebPath(entry.Name())
+ if err != nil {
+ if repo_model.IsErrWikiInvalidFileName(err) {
+ continue
+ }
+ ctx.ServerError("WikiFilenameToName", err)
+ return
+ }
+ _, displayName := wiki_service.WebPathToUserTitle(wikiName)
+ pages = append(pages, PageMeta{
+ Name: displayName,
+ SubURL: wiki_service.WebPathToURLPath(wikiName),
+ GitEntryName: entry.Name(),
+ UpdatedUnix: timeutil.TimeStamp(c.Author.When.Unix()),
+ })
+ }
+ ctx.Data["Pages"] = pages
+
+ ctx.HTML(http.StatusOK, tplWikiPages)
+}
+
+// WikiRaw outputs the raw blob requested by the user (an image, for example)
+func WikiRaw(ctx *context.Context) {
+ wikiRepo, commit, err := findWikiRepoCommit(ctx)
+ defer func() {
+ if wikiRepo != nil {
+ wikiRepo.Close()
+ }
+ }()
+
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ ctx.NotFound("findEntryForFile", nil)
+ return
+ }
+ ctx.ServerError("findEntryForfile", err)
+ return
+ }
+
+ providedWebPath := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ providedGitPath := wiki_service.WebPathToGitPath(providedWebPath)
+ var entry *git.TreeEntry
+ if commit != nil {
+ // Try to find a file with that name
+ entry, err = findEntryForFile(commit, providedGitPath)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findFile", err)
+ return
+ }
+
+ if entry == nil {
+ // Try to find a wiki page with that name
+ providedGitPath = strings.TrimSuffix(providedGitPath, ".md")
+ entry, err = findEntryForFile(commit, providedGitPath)
+ if err != nil && !git.IsErrNotExist(err) {
+ ctx.ServerError("findFile", err)
+ return
+ }
+ }
+ }
+
+ if entry != nil {
+ if err = common.ServeBlob(ctx.Base, ctx.Repo.TreePath, entry.Blob(), nil); err != nil {
+ ctx.ServerError("ServeBlob", err)
+ }
+ return
+ }
+
+ ctx.NotFound("findEntryForFile", nil)
+}
+
+// NewWiki renders the wiki creation page
+func NewWiki(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Data["title"] = "Home"
+ }
+ if ctx.FormString("title") != "" {
+ ctx.Data["title"] = ctx.FormString("title")
+ }
+
+ ctx.HTML(http.StatusOK, tplWikiNew)
+}
+
+// NewWikiPost responds to a wiki creation request
+func NewWikiPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewWikiForm)
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplWikiNew)
+ return
+ }
+
+ if util.IsEmptyString(form.Title) {
+ ctx.RenderWithErr(ctx.Tr("repo.issues.new.title_empty"), tplWikiNew, form)
+ return
+ }
+
+ wikiName := wiki_service.UserTitleToWebPath("", form.Title)
+
+ if len(form.Message) == 0 {
+ form.Message = ctx.Locale.TrString("repo.editor.add", form.Title)
+ }
+
+ if err := wiki_service.AddWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName, form.Content, form.Message); err != nil {
+ if repo_model.IsErrWikiReservedName(err) {
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.wiki.reserved_page", wikiName), tplWikiNew, &form)
+ } else if repo_model.IsErrWikiAlreadyExist(err) {
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("repo.wiki.page_already_exists"), tplWikiNew, &form)
+ } else {
+ ctx.ServerError("AddWikiPage", err)
+ }
+ return
+ }
+
+ notify_service.NewWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(wikiName), form.Message)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + wiki_service.WebPathToURLPath(wikiName))
+}
+
+// EditWiki renders the wiki edit page
+func EditWiki(ctx *context.Context) {
+ ctx.Data["PageIsWikiEdit"] = true
+
+ if !ctx.Repo.Repository.HasWiki() {
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki")
+ return
+ }
+
+ renderEditPage(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplWikiNew)
+}
+
+// EditWikiPost responds to a wiki edit request
+func EditWikiPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewWikiForm)
+ ctx.Data["Title"] = ctx.Tr("repo.wiki.new_page")
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplWikiNew)
+ return
+ }
+
+ oldWikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ newWikiName := wiki_service.UserTitleToWebPath("", form.Title)
+
+ if len(form.Message) == 0 {
+ form.Message = ctx.Locale.TrString("repo.editor.update", form.Title)
+ }
+
+ if err := wiki_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, oldWikiName, newWikiName, form.Content, form.Message); err != nil {
+ ctx.ServerError("EditWikiPage", err)
+ return
+ }
+
+ notify_service.EditWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(newWikiName), form.Message)
+
+ ctx.Redirect(ctx.Repo.RepoLink + "/wiki/" + wiki_service.WebPathToURLPath(newWikiName))
+}
+
+// DeleteWikiPagePost deletes a wiki page
+func DeleteWikiPagePost(ctx *context.Context) {
+ wikiName := wiki_service.WebPathFromRequest(ctx.PathParamRaw("*"))
+ if len(wikiName) == 0 {
+ wikiName = "Home"
+ }
+
+ if err := wiki_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, wikiName); err != nil {
+ ctx.ServerError("DeleteWikiPage", err)
+ return
+ }
+
+ notify_service.DeleteWikiPage(ctx, ctx.Doer, ctx.Repo.Repository, string(wikiName))
+
+ ctx.JSONRedirect(ctx.Repo.RepoLink + "/wiki/")
+}
+
+func WikiSearchContent(ctx *context.Context) {
+ keyword := ctx.FormTrim("q")
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplWikiSearch)
+ return
+ }
+
+ res, err := wiki_service.SearchWikiContents(ctx, ctx.Repo.Repository, keyword)
+ if err != nil {
+ ctx.ServerError("SearchWikiContents", err)
+ return
+ }
+
+ ctx.Data["Results"] = res
+ ctx.HTML(http.StatusOK, tplWikiSearch)
+}
diff --git a/routers/web/repo/wiki_test.go b/routers/web/repo/wiki_test.go
new file mode 100644
index 0000000..00a35a5
--- /dev/null
+++ b/routers/web/repo/wiki_test.go
@@ -0,0 +1,224 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "io"
+ "net/http"
+ "net/url"
+ "testing"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+ wiki_service "code.gitea.io/gitea/services/wiki"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+const (
+ content = "Wiki contents for unit tests"
+ message = "Wiki commit message for unit tests"
+)
+
+func wikiEntry(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) *git.TreeEntry {
+ wikiRepo, err := gitrepo.OpenWikiRepository(git.DefaultContext, repo)
+ require.NoError(t, err)
+ defer wikiRepo.Close()
+ commit, err := wikiRepo.GetBranchCommit("master")
+ require.NoError(t, err)
+ entries, err := commit.ListEntries()
+ require.NoError(t, err)
+ for _, entry := range entries {
+ if entry.Name() == wiki_service.WebPathToGitPath(wikiName) {
+ return entry
+ }
+ }
+ return nil
+}
+
+func wikiContent(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) string {
+ entry := wikiEntry(t, repo, wikiName)
+ if !assert.NotNil(t, entry) {
+ return ""
+ }
+ reader, err := entry.Blob().DataAsync()
+ require.NoError(t, err)
+ defer reader.Close()
+ bytes, err := io.ReadAll(reader)
+ require.NoError(t, err)
+ return string(bytes)
+}
+
+func assertWikiExists(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) {
+ assert.NotNil(t, wikiEntry(t, repo, wikiName))
+}
+
+func assertWikiNotExists(t *testing.T, repo *repo_model.Repository, wikiName wiki_service.WebPath) {
+ assert.Nil(t, wikiEntry(t, repo, wikiName))
+}
+
+func assertPagesMetas(t *testing.T, expectedNames []string, metas any) {
+ pageMetas, ok := metas.([]PageMeta)
+ if !assert.True(t, ok) {
+ return
+ }
+ if !assert.Len(t, pageMetas, len(expectedNames)) {
+ return
+ }
+ for i, pageMeta := range pageMetas {
+ assert.EqualValues(t, expectedNames[i], pageMeta.Name)
+ }
+}
+
+func TestWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadRepo(t, ctx, 1)
+ Wiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, "Home", ctx.Data["Title"])
+ assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
+}
+
+func TestWikiPages(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_pages")
+ contexttest.LoadRepo(t, ctx, 1)
+ WikiPages(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assertPagesMetas(t, []string{"Home", "Page With Image", "Page With Spaced Name", "Unescaped File"}, ctx.Data["Pages"])
+}
+
+func TestNewWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ NewWiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, ctx.Tr("repo.wiki.new_page"), ctx.Data["Title"])
+}
+
+func TestNewWikiPost(t *testing.T) {
+ for _, title := range []string{
+ "New page",
+ "&&&&",
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: title,
+ Content: content,
+ Message: message,
+ })
+ NewWikiPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
+ assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
+ }
+}
+
+func TestNewWikiPost_ReservedName(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_new")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: "_edit",
+ Content: content,
+ Message: message,
+ })
+ NewWikiPost(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, ctx.Tr("repo.wiki.reserved_page"), ctx.Flash.ErrorMsg)
+ assertWikiNotExists(t, ctx.Repo.Repository, "_edit")
+}
+
+func TestEditWiki(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_edit")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ EditWiki(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, "Home", ctx.Data["Title"])
+ assert.Equal(t, wikiContent(t, ctx.Repo.Repository, "Home"), ctx.Data["content"])
+}
+
+func TestEditWikiPost(t *testing.T) {
+ for _, title := range []string{
+ "Home",
+ "New/<page>",
+ } {
+ unittest.PrepareTestEnv(t)
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_new")
+ ctx.SetParams("*", "Home")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ web.SetForm(ctx, &forms.NewWikiForm{
+ Title: title,
+ Content: content,
+ Message: message,
+ })
+ EditWikiPost(ctx)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
+ assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
+ if title != "Home" {
+ assertWikiNotExists(t, ctx.Repo.Repository, "Home")
+ }
+ }
+}
+
+func TestDeleteWikiPagePost(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/Home?action=_delete")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ DeleteWikiPagePost(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assertWikiNotExists(t, ctx.Repo.Repository, "Home")
+}
+
+func TestWikiRaw(t *testing.T) {
+ for filepath, filetype := range map[string]string{
+ "jpeg.jpg": "image/jpeg",
+ "images/jpeg.jpg": "image/jpeg",
+ "Page With Spaced Name": "text/plain; charset=utf-8",
+ "Page-With-Spaced-Name": "text/plain; charset=utf-8",
+ "Page With Spaced Name.md": "", // there is no "Page With Spaced Name.md" in repo
+ "Page-With-Spaced-Name.md": "text/plain; charset=utf-8",
+ } {
+ unittest.PrepareTestEnv(t)
+
+ ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/raw/"+url.PathEscape(filepath))
+ ctx.SetParams("*", filepath)
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+ WikiRaw(ctx)
+ if filetype == "" {
+ assert.EqualValues(t, http.StatusNotFound, ctx.Resp.Status(), "filepath: %s", filepath)
+ } else {
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status(), "filepath: %s", filepath)
+ assert.EqualValues(t, filetype, ctx.Resp.Header().Get("Content-Type"), "filepath: %s", filepath)
+ }
+ }
+}
diff --git a/routers/web/shared/actions/runners.go b/routers/web/shared/actions/runners.go
new file mode 100644
index 0000000..f389332
--- /dev/null
+++ b/routers/web/shared/actions/runners.go
@@ -0,0 +1,161 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ "errors"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// RunnersList prepares data for runners list
+func RunnersList(ctx *context.Context, opts actions_model.FindRunnerOptions) {
+ runners, count, err := db.FindAndCount[actions_model.ActionRunner](ctx, opts)
+ if err != nil {
+ ctx.ServerError("CountRunners", err)
+ return
+ }
+
+ if err := actions_model.RunnerList(runners).LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+
+	// OwnerID=0 and RepoID=0 means the token is a global (instance-wide) registration token
+ var token *actions_model.ActionRunnerToken
+ token, err = actions_model.GetLatestRunnerToken(ctx, opts.OwnerID, opts.RepoID)
+ if errors.Is(err, util.ErrNotExist) || (token != nil && !token.IsActive) {
+ token, err = actions_model.NewRunnerToken(ctx, opts.OwnerID, opts.RepoID)
+ if err != nil {
+ ctx.ServerError("CreateRunnerToken", err)
+ return
+ }
+ } else if err != nil {
+ ctx.ServerError("GetLatestRunnerToken", err)
+ return
+ }
+
+ ctx.Data["Keyword"] = opts.Filter
+ ctx.Data["Runners"] = runners
+ ctx.Data["Total"] = count
+ ctx.Data["RegistrationToken"] = token.Token
+ ctx.Data["RunnerOwnerID"] = opts.OwnerID
+ ctx.Data["RunnerRepoID"] = opts.RepoID
+ ctx.Data["SortType"] = opts.Sort
+
+ pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
+
+ ctx.Data["Page"] = pager
+}
+
+// RunnerDetails prepares data for the runner edit page
+func RunnerDetails(ctx *context.Context, page int, runnerID, ownerID, repoID int64) {
+ runner, err := actions_model.GetRunnerByID(ctx, runnerID)
+ if err != nil {
+ ctx.ServerError("GetRunnerByID", err)
+ return
+ }
+ if err := runner.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ if !runner.Editable(ownerID, repoID) {
+ err = errors.New("no permission to edit this runner")
+ ctx.NotFound("RunnerDetails", err)
+ return
+ }
+
+ ctx.Data["Runner"] = runner
+
+ opts := actions_model.FindTaskOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: 30,
+ },
+ Status: actions_model.StatusUnknown, // Unknown means all
+ RunnerID: runner.ID,
+ }
+
+ tasks, count, err := db.FindAndCount[actions_model.ActionTask](ctx, opts)
+ if err != nil {
+ ctx.ServerError("CountTasks", err)
+ return
+ }
+
+ if err = actions_model.TaskList(tasks).LoadAttributes(ctx); err != nil {
+ ctx.ServerError("TasksLoadAttributes", err)
+ return
+ }
+
+ ctx.Data["Tasks"] = tasks
+ pager := context.NewPagination(int(count), opts.PageSize, opts.Page, 5)
+ ctx.Data["Page"] = pager
+}
+
+// RunnerDetailsEditPost handles the form submission that edits a runner's details
+func RunnerDetailsEditPost(ctx *context.Context, runnerID, ownerID, repoID int64, redirectTo string) {
+ runner, err := actions_model.GetRunnerByID(ctx, runnerID)
+ if err != nil {
+ log.Warn("RunnerDetailsEditPost.GetRunnerByID failed: %v, url: %s", err, ctx.Req.URL)
+ ctx.ServerError("RunnerDetailsEditPost.GetRunnerByID", err)
+ return
+ }
+ if !runner.Editable(ownerID, repoID) {
+ ctx.NotFound("RunnerDetailsEditPost.Editable", util.NewPermissionDeniedErrorf("no permission to edit this runner"))
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.EditRunnerForm)
+ runner.Description = form.Description
+
+ err = actions_model.UpdateRunner(ctx, runner, "description")
+ if err != nil {
+ log.Warn("RunnerDetailsEditPost.UpdateRunner failed: %v, url: %s", err, ctx.Req.URL)
+ ctx.Flash.Warning(ctx.Tr("actions.runners.update_runner_failed"))
+ ctx.Redirect(redirectTo)
+ return
+ }
+
+ log.Debug("RunnerDetailsEditPost success: %s", ctx.Req.URL)
+
+ ctx.Flash.Success(ctx.Tr("actions.runners.update_runner_success"))
+ ctx.Redirect(redirectTo)
+}
+
+// RunnerResetRegistrationToken resets the runner registration token
+func RunnerResetRegistrationToken(ctx *context.Context, ownerID, repoID int64, redirectTo string) {
+ _, err := actions_model.NewRunnerToken(ctx, ownerID, repoID)
+ if err != nil {
+ ctx.ServerError("ResetRunnerRegistrationToken", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("actions.runners.reset_registration_token_success"))
+ ctx.Redirect(redirectTo)
+}
+
+// RunnerDeletePost handles the request to delete a runner
+func RunnerDeletePost(ctx *context.Context, runnerID int64,
+ successRedirectTo, failedRedirectTo string,
+) {
+ if err := actions_model.DeleteRunner(ctx, runnerID); err != nil {
+ log.Warn("DeleteRunnerPost.UpdateRunner failed: %v, url: %s", err, ctx.Req.URL)
+ ctx.Flash.Warning(ctx.Tr("actions.runners.delete_runner_failed"))
+
+ ctx.JSONRedirect(failedRedirectTo)
+ return
+ }
+
+ log.Info("DeleteRunnerPost success: %s", ctx.Req.URL)
+
+ ctx.Flash.Success(ctx.Tr("actions.runners.delete_runner_success"))
+
+ ctx.JSONRedirect(successRedirectTo)
+}
diff --git a/routers/web/shared/actions/variables.go b/routers/web/shared/actions/variables.go
new file mode 100644
index 0000000..79c03e4
--- /dev/null
+++ b/routers/web/shared/actions/variables.go
@@ -0,0 +1,65 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/web"
+ actions_service "code.gitea.io/gitea/services/actions"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+func SetVariablesContext(ctx *context.Context, ownerID, repoID int64) {
+ variables, err := db.Find[actions_model.ActionVariable](ctx, actions_model.FindVariablesOpts{
+ OwnerID: ownerID,
+ RepoID: repoID,
+ })
+ if err != nil {
+ ctx.ServerError("FindVariables", err)
+ return
+ }
+ ctx.Data["Variables"] = variables
+}
+
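+// CreateVariable creates a new Actions variable from the submitted form and redirects to redirectURL on success.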
+func CreateVariable(ctx *context.Context, ownerID, repoID int64, redirectURL string) {
+ form := web.GetForm(ctx).(*forms.EditVariableForm)
+
+ v, err := actions_service.CreateVariable(ctx, ownerID, repoID, form.Name, form.Data)
+ if err != nil {
+ log.Error("CreateVariable: %v", err)
+ ctx.JSONError(ctx.Tr("actions.variables.creation.failed"))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("actions.variables.creation.success", v.Name))
+ ctx.JSONRedirect(redirectURL)
+}
+
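+// UpdateVariable updates the variable identified by the ":variable_id" route parameter with the submitted form values.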
+func UpdateVariable(ctx *context.Context, redirectURL string) {
+ id := ctx.ParamsInt64(":variable_id")
+ form := web.GetForm(ctx).(*forms.EditVariableForm)
+
+ if ok, err := actions_service.UpdateVariable(ctx, id, form.Name, form.Data); err != nil || !ok {
+ log.Error("UpdateVariable: %v", err)
+ ctx.JSONError(ctx.Tr("actions.variables.update.failed"))
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("actions.variables.update.success"))
+ ctx.JSONRedirect(redirectURL)
+}
+
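+// DeleteVariable deletes the variable identified by the ":variable_id" route parameter.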
+func DeleteVariable(ctx *context.Context, redirectURL string) {
+ id := ctx.ParamsInt64(":variable_id")
+
+ if err := actions_service.DeleteVariableByID(ctx, id); err != nil {
+ log.Error("Delete variable [%d] failed: %v", id, err)
+ ctx.JSONError(ctx.Tr("actions.variables.deletion.failed"))
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("actions.variables.deletion.success"))
+ ctx.JSONRedirect(redirectURL)
+}
diff --git a/routers/web/shared/packages/packages.go b/routers/web/shared/packages/packages.go
new file mode 100644
index 0000000..af960f1
--- /dev/null
+++ b/routers/web/shared/packages/packages.go
@@ -0,0 +1,260 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package packages
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ cargo_service "code.gitea.io/gitea/services/packages/cargo"
+ container_service "code.gitea.io/gitea/services/packages/container"
+)
+
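+// SetPackagesContext loads the owner's package cleanup rules and whether a Cargo index repository exists into the template context.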
+func SetPackagesContext(ctx *context.Context, owner *user_model.User) {
+ pcrs, err := packages_model.GetCleanupRulesByOwner(ctx, owner.ID)
+ if err != nil {
+ ctx.ServerError("GetCleanupRulesByOwner", err)
+ return
+ }
+
+ ctx.Data["CleanupRules"] = pcrs
+
+ ctx.Data["CargoIndexExists"], err = repo_model.IsRepositoryModelExist(ctx, owner, cargo_service.IndexRepositoryName)
+ if err != nil {
+ ctx.ServerError("IsRepositoryModelExist", err)
+ return
+ }
+}
+
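+// SetRuleAddContext prepares the context for creating a new package cleanup rule.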
+func SetRuleAddContext(ctx *context.Context) {
+ setRuleEditContext(ctx, nil)
+}
+
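+// SetRuleEditContext prepares the context for editing an existing package cleanup rule.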
+func SetRuleEditContext(ctx *context.Context, owner *user_model.User) {
+ pcr := getCleanupRuleByContext(ctx, owner)
+ if pcr == nil {
+ return
+ }
+
+ setRuleEditContext(ctx, pcr)
+}
+
+func setRuleEditContext(ctx *context.Context, pcr *packages_model.PackageCleanupRule) {
+ ctx.Data["IsEditRule"] = pcr != nil
+
+ if pcr == nil {
+ pcr = &packages_model.PackageCleanupRule{}
+ }
+ ctx.Data["CleanupRule"] = pcr
+ ctx.Data["AvailableTypes"] = packages_model.TypeList
+}
+
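+// PerformRuleAddPost handles the form submission that creates a new package cleanup rule.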
+func PerformRuleAddPost(ctx *context.Context, owner *user_model.User, redirectURL string, template base.TplName) {
+ performRuleEditPost(ctx, owner, nil, redirectURL, template)
+}
+
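+// PerformRuleEditPost updates or removes an existing package cleanup rule, depending on the submitted form action.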
+func PerformRuleEditPost(ctx *context.Context, owner *user_model.User, redirectURL string, template base.TplName) {
+ pcr := getCleanupRuleByContext(ctx, owner)
+ if pcr == nil {
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.PackageCleanupRuleForm)
+
+ if form.Action == "remove" {
+ if err := packages_model.DeleteCleanupRuleByID(ctx, pcr.ID); err != nil {
+ ctx.ServerError("DeleteCleanupRuleByID", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("packages.owner.settings.cleanuprules.success.delete"))
+ ctx.Redirect(redirectURL)
+ } else {
+ performRuleEditPost(ctx, owner, pcr, redirectURL, template)
+ }
+}
+
+func performRuleEditPost(ctx *context.Context, owner *user_model.User, pcr *packages_model.PackageCleanupRule, redirectURL string, template base.TplName) {
+ isEditRule := pcr != nil
+
+ if pcr == nil {
+ pcr = &packages_model.PackageCleanupRule{}
+ }
+
+ form := web.GetForm(ctx).(*forms.PackageCleanupRuleForm)
+
+ pcr.Enabled = form.Enabled
+ pcr.OwnerID = owner.ID
+ pcr.KeepCount = form.KeepCount
+ pcr.KeepPattern = form.KeepPattern
+ pcr.RemoveDays = form.RemoveDays
+ pcr.RemovePattern = form.RemovePattern
+ pcr.MatchFullName = form.MatchFullName
+
+ ctx.Data["IsEditRule"] = isEditRule
+ ctx.Data["CleanupRule"] = pcr
+ ctx.Data["AvailableTypes"] = packages_model.TypeList
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, template)
+ return
+ }
+
+ if isEditRule {
+ if err := packages_model.UpdateCleanupRule(ctx, pcr); err != nil {
+ ctx.ServerError("UpdateCleanupRule", err)
+ return
+ }
+ } else {
+ pcr.Type = packages_model.Type(form.Type)
+
+ if has, err := packages_model.HasOwnerCleanupRuleForPackageType(ctx, owner.ID, pcr.Type); err != nil {
+ ctx.ServerError("HasOwnerCleanupRuleForPackageType", err)
+ return
+ } else if has {
+ ctx.Data["Err_Type"] = true
+ ctx.HTML(http.StatusOK, template)
+ return
+ }
+
+ var err error
+ if pcr, err = packages_model.InsertCleanupRule(ctx, pcr); err != nil {
+ ctx.ServerError("InsertCleanupRule", err)
+ return
+ }
+ }
+
+ ctx.Flash.Success(ctx.Tr("packages.owner.settings.cleanuprules.success.update"))
+ ctx.Redirect(fmt.Sprintf("%s/rules/%d", redirectURL, pcr.ID))
+}
+
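+// SetRulePreviewContext computes which package versions the cleanup rule would remove and stores the preview in the template context.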
+func SetRulePreviewContext(ctx *context.Context, owner *user_model.User) {
+ pcr := getCleanupRuleByContext(ctx, owner)
+ if pcr == nil {
+ return
+ }
+
+ if err := pcr.CompiledPattern(); err != nil {
+ ctx.ServerError("CompiledPattern", err)
+ return
+ }
+
+ olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays)
+
+ packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type)
+ if err != nil {
+ ctx.ServerError("GetPackagesByType", err)
+ return
+ }
+
+ versionsToRemove := make([]*packages_model.PackageDescriptor, 0, 10)
+
+ for _, p := range packages {
+ pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ PackageID: p.ID,
+ IsInternal: optional.Some(false),
+ Sort: packages_model.SortCreatedDesc,
+ Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200),
+ })
+ if err != nil {
+ ctx.ServerError("SearchVersions", err)
+ return
+ }
+ for _, pv := range pvs {
+ if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil {
+ ctx.ServerError("ShouldBeSkipped", err)
+ return
+ } else if skip {
+ continue
+ }
+
+ toMatch := pv.LowerVersion
+ if pcr.MatchFullName {
+ toMatch = p.LowerName + "/" + pv.LowerVersion
+ }
+
+ if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) {
+ continue
+ }
+ if pv.CreatedUnix.AsLocalTime().After(olderThan) {
+ continue
+ }
+ if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) {
+ continue
+ }
+
+ pd, err := packages_model.GetPackageDescriptor(ctx, pv)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptor", err)
+ return
+ }
+ versionsToRemove = append(versionsToRemove, pd)
+ }
+ }
+
+ ctx.Data["CleanupRule"] = pcr
+ ctx.Data["VersionsToRemove"] = versionsToRemove
+}
+
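+// getCleanupRuleByContext resolves the cleanup rule from the "id" form value or route parameter and verifies that it belongs to the owner.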
+func getCleanupRuleByContext(ctx *context.Context, owner *user_model.User) *packages_model.PackageCleanupRule {
+ id := ctx.FormInt64("id")
+ if id == 0 {
+ id = ctx.ParamsInt64("id")
+ }
+
+ pcr, err := packages_model.GetCleanupRuleByID(ctx, id)
+ if err != nil {
+ if err == packages_model.ErrPackageCleanupRuleNotExist {
+ ctx.NotFound("", err)
+ } else {
+ ctx.ServerError("GetCleanupRuleByID", err)
+ }
+ return nil
+ }
+
+ if pcr != nil && pcr.OwnerID == owner.ID {
+ return pcr
+ }
+
+ ctx.NotFound("", fmt.Errorf("PackageCleanupRule[%v] not associated to owner %v", id, owner))
+
+ return nil
+}
+
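+// InitializeCargoIndex creates the owner's Cargo index repository and reports the result via a flash message.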
+func InitializeCargoIndex(ctx *context.Context, owner *user_model.User) {
+ err := cargo_service.InitializeIndexRepository(ctx, owner, owner)
+ if err != nil {
+ log.Error("InitializeIndexRepository failed: %v", err)
+ ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.initialize.error", err))
+ } else {
+ ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.initialize.success"))
+ }
+}
+
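+// RebuildCargoIndex rebuilds the owner's Cargo index repository and reports the result via a flash message.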
+func RebuildCargoIndex(ctx *context.Context, owner *user_model.User) {
+ err := cargo_service.RebuildIndex(ctx, owner, owner)
+ if err != nil {
+ log.Error("RebuildIndex failed: %v", err)
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.rebuild.no_index"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.rebuild.error", err))
+ }
+ } else {
+ ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.rebuild.success"))
+ }
+}
diff --git a/routers/web/shared/project/column.go b/routers/web/shared/project/column.go
new file mode 100644
index 0000000..599842e
--- /dev/null
+++ b/routers/web/shared/project/column.go
@@ -0,0 +1,48 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package project
+
+import (
+ project_model "code.gitea.io/gitea/models/project"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/services/context"
+)
+
+// MoveColumns reorders the columns of a project according to the submitted sorting
+func MoveColumns(ctx *context.Context) {
+ project, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
+ if err != nil {
+ ctx.NotFoundOrServerError("GetProjectByID", project_model.IsErrProjectNotExist, err)
+ return
+ }
+ if !project.CanBeAccessedByOwnerRepo(ctx.ContextUser.ID, ctx.Repo.Repository) {
+ ctx.NotFound("CanBeAccessedByOwnerRepo", nil)
+ return
+ }
+
+ type movedColumnsForm struct {
+ Columns []struct {
+ ColumnID int64 `json:"columnID"`
+ Sorting int64 `json:"sorting"`
+ } `json:"columns"`
+ }
+
+ form := &movedColumnsForm{}
+ if err = json.NewDecoder(ctx.Req.Body).Decode(&form); err != nil {
+ ctx.ServerError("DecodeMovedColumnsForm", err)
+ return
+ }
+
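+	// Map the desired sorting position to the column ID, as consumed by MoveColumnsOnProject.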
+ sortedColumnIDs := make(map[int64]int64)
+ for _, column := range form.Columns {
+ sortedColumnIDs[column.Sorting] = column.ColumnID
+ }
+
+ if err = project_model.MoveColumnsOnProject(ctx, project, sortedColumnIDs); err != nil {
+ ctx.ServerError("MoveColumnsOnProject", err)
+ return
+ }
+
+ ctx.JSONOK()
+}
diff --git a/routers/web/shared/secrets/secrets.go b/routers/web/shared/secrets/secrets.go
new file mode 100644
index 0000000..3bd421f
--- /dev/null
+++ b/routers/web/shared/secrets/secrets.go
@@ -0,0 +1,53 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package secrets
+
+import (
+ "code.gitea.io/gitea/models/db"
+ secret_model "code.gitea.io/gitea/models/secret"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ secret_service "code.gitea.io/gitea/services/secrets"
+)
+
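+// SetSecretsContext loads the Actions secrets of the given owner/repo into the template context.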
+func SetSecretsContext(ctx *context.Context, ownerID, repoID int64) {
+ secrets, err := db.Find[secret_model.Secret](ctx, secret_model.FindSecretsOptions{OwnerID: ownerID, RepoID: repoID})
+ if err != nil {
+ ctx.ServerError("FindSecrets", err)
+ return
+ }
+
+ ctx.Data["Secrets"] = secrets
+}
+
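+// PerformSecretsPost creates or updates a secret from the submitted form and redirects to redirectURL on success.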
+func PerformSecretsPost(ctx *context.Context, ownerID, repoID int64, redirectURL string) {
+ form := web.GetForm(ctx).(*forms.AddSecretForm)
+
+ s, _, err := secret_service.CreateOrUpdateSecret(ctx, ownerID, repoID, form.Name, util.ReserveLineBreakForTextarea(form.Data))
+ if err != nil {
+ log.Error("CreateOrUpdateSecret failed: %v", err)
+ ctx.JSONError(ctx.Tr("secrets.creation.failed"))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("secrets.creation.success", s.Name))
+ ctx.JSONRedirect(redirectURL)
+}
+
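+// PerformSecretsDelete deletes the secret identified by the "id" form value.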
+func PerformSecretsDelete(ctx *context.Context, ownerID, repoID int64, redirectURL string) {
+ id := ctx.FormInt64("id")
+
+ err := secret_service.DeleteSecretByID(ctx, ownerID, repoID, id)
+ if err != nil {
+ log.Error("DeleteSecretByID(%d) failed: %v", id, err)
+ ctx.JSONError(ctx.Tr("secrets.deletion.failed"))
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("secrets.deletion.success"))
+ ctx.JSONRedirect(redirectURL)
+}
diff --git a/routers/web/shared/user/header.go b/routers/web/shared/user/header.go
new file mode 100644
index 0000000..fd7605c
--- /dev/null
+++ b/routers/web/shared/user/header.go
@@ -0,0 +1,163 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/url"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ project_model "code.gitea.io/gitea/models/project"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// prepareContextForCommonProfile stores some common data in the context for the user's profile-related pages (including the nav menu).
+// It is designed to be fast and safe to call multiple times in one request.
+func prepareContextForCommonProfile(ctx *context.Context) {
+ ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["EnableFeed"] = setting.Other.EnableFeed
+ ctx.Data["FeedURL"] = ctx.ContextUser.HomeLink()
+}
+
+// PrepareContextForProfileBigAvatar sets the context for the big avatar view on the profile page
+func PrepareContextForProfileBigAvatar(ctx *context.Context) {
+ prepareContextForCommonProfile(ctx)
+
+ ctx.Data["IsBlocked"] = ctx.Doer != nil && user_model.IsBlocked(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ ctx.Data["ShowUserEmail"] = setting.UI.ShowUserEmail && ctx.ContextUser.Email != "" && ctx.IsSigned && !ctx.ContextUser.KeepEmailPrivate
+ if setting.Service.UserLocationMapURL != "" {
+ ctx.Data["ContextUserLocationMapURL"] = setting.Service.UserLocationMapURL + url.QueryEscape(ctx.ContextUser.Location)
+ }
+ // Show OpenID URIs
+ openIDs, err := user_model.GetUserOpenIDs(ctx, ctx.ContextUser.ID)
+ if err != nil {
+ ctx.ServerError("GetUserOpenIDs", err)
+ return
+ }
+ ctx.Data["OpenIDs"] = openIDs
+ if len(ctx.ContextUser.Description) != 0 {
+ content, err := markdown.RenderString(&markup.RenderContext{
+ Metas: map[string]string{"mode": "document"},
+ Ctx: ctx,
+ }, ctx.ContextUser.Description)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+ ctx.Data["RenderedDescription"] = content
+ }
+
+ showPrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == ctx.ContextUser.ID)
+ orgs, err := db.Find[organization.Organization](ctx, organization.FindOrgOptions{
+ UserID: ctx.ContextUser.ID,
+ IncludePrivate: showPrivate,
+ })
+ if err != nil {
+ ctx.ServerError("FindOrgs", err)
+ return
+ }
+ ctx.Data["Orgs"] = orgs
+ ctx.Data["HasOrgsVisible"] = organization.HasOrgsVisible(ctx, orgs, ctx.Doer)
+
+ badges, _, err := user_model.GetUserBadges(ctx, ctx.ContextUser)
+ if err != nil {
+ ctx.ServerError("GetUserBadges", err)
+ return
+ }
+ ctx.Data["Badges"] = badges
+
+	// If the numbers were already provided by other functions, there is no need to query again (which is slow).
+ if _, ok := ctx.Data["NumFollowers"]; !ok {
+ _, ctx.Data["NumFollowers"], _ = user_model.GetUserFollowers(ctx, ctx.ContextUser, ctx.Doer, db.ListOptions{PageSize: 1, Page: 1})
+ }
+ if _, ok := ctx.Data["NumFollowing"]; !ok {
+ _, ctx.Data["NumFollowing"], _ = user_model.GetUserFollowing(ctx, ctx.ContextUser, ctx.Doer, db.ListOptions{PageSize: 1, Page: 1})
+ }
+}
+
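+// FindUserProfileReadme looks up the ContextUser's ".profile" repository and, if the doer can read its code,
+// returns the README.md blob of the default branch. The returned close function must always be called.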
+func FindUserProfileReadme(ctx *context.Context, doer *user_model.User) (profileDbRepo *repo_model.Repository, profileGitRepo *git.Repository, profileReadmeBlob *git.Blob, profileClose func()) {
+ profileDbRepo, err := repo_model.GetRepositoryByName(ctx, ctx.ContextUser.ID, ".profile")
+ if err == nil {
+ perm, err := access_model.GetUserRepoPermission(ctx, profileDbRepo, doer)
+ if err == nil && !profileDbRepo.IsEmpty && perm.CanRead(unit.TypeCode) {
+ if profileGitRepo, err = gitrepo.OpenRepository(ctx, profileDbRepo); err != nil {
+ log.Error("FindUserProfileReadme failed to OpenRepository: %v", err)
+ } else {
+ if commit, err := profileGitRepo.GetBranchCommit(profileDbRepo.DefaultBranch); err != nil {
+ log.Error("FindUserProfileReadme failed to GetBranchCommit: %v", err)
+ } else {
+ profileReadmeBlob, _ = commit.GetBlobByFoldedPath("README.md")
+ }
+ }
+ }
+ } else if !repo_model.IsErrRepoNotExist(err) {
+ log.Error("FindUserProfileReadme failed to GetRepositoryByName: %v", err)
+ }
+ return profileDbRepo, profileGitRepo, profileReadmeBlob, func() {
+ if profileGitRepo != nil {
+ _ = profileGitRepo.Close()
+ }
+ }
+}
+
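+// RenderUserHeader prepares the common profile data and records whether a profile README exists.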
+func RenderUserHeader(ctx *context.Context) {
+ prepareContextForCommonProfile(ctx)
+
+ _, _, profileReadmeBlob, profileClose := FindUserProfileReadme(ctx, ctx.Doer)
+ defer profileClose()
+ ctx.Data["HasProfileReadme"] = profileReadmeBlob != nil
+}
+
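+// LoadHeaderCount counts the repositories, projects and packages of the context user and stores the numbers in the template context.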
+func LoadHeaderCount(ctx *context.Context) error {
+ prepareContextForCommonProfile(ctx)
+
+ var err error
+
+ ctx.Data["RepoCount"], err = repo_model.CountRepository(ctx, &repo_model.SearchRepoOptions{
+ Actor: ctx.Doer,
+ OwnerID: ctx.ContextUser.ID,
+ Private: ctx.IsSigned,
+ Collaborate: optional.Some(false),
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ })
+ if err != nil {
+ return err
+ }
+
+ var projectType project_model.Type
+ if ctx.ContextUser.IsOrganization() {
+ projectType = project_model.TypeOrganization
+ } else {
+ projectType = project_model.TypeIndividual
+ }
+ ctx.Data["ProjectCount"], err = db.Count[project_model.Project](ctx, project_model.SearchOptions{
+ OwnerID: ctx.ContextUser.ID,
+ IsClosed: optional.Some(false),
+ Type: projectType,
+ })
+ if err != nil {
+ return err
+ }
+ ctx.Data["PackageCount"], err = packages_model.CountOwnerPackages(ctx, ctx.ContextUser.ID)
+ if err != nil {
+ return err
+ }
+
+ return nil
+}
diff --git a/routers/web/swagger_json.go b/routers/web/swagger_json.go
new file mode 100644
index 0000000..fc39b50
--- /dev/null
+++ b/routers/web/swagger_json.go
@@ -0,0 +1,13 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "code.gitea.io/gitea/services/context"
+)
+
+// SwaggerV1Json renders the Swagger v1 JSON document
+func SwaggerV1Json(ctx *context.Context) {
+ ctx.JSONTemplate("swagger/v1_json")
+}
diff --git a/routers/web/user/avatar.go b/routers/web/user/avatar.go
new file mode 100644
index 0000000..04f5101
--- /dev/null
+++ b/routers/web/user/avatar.go
@@ -0,0 +1,57 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/avatars"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/httpcache"
+ "code.gitea.io/gitea/services/context"
+)
+
+func cacheableRedirect(ctx *context.Context, location string) {
+ // here we should not use `setting.StaticCacheTime`, it is pretty long (default: 6 hours)
+ // we must make sure the redirection cache time is short enough, otherwise a user won't see the updated avatar in 6 hours
+ // it's OK to make the cache time short, it is only a redirection, and doesn't cost much to make a new request
+ httpcache.SetCacheControlInHeader(ctx.Resp.Header(), 5*time.Minute)
+ ctx.Redirect(location)
+}
+
+// AvatarByUserName redirects the browser to the user's avatar at the requested size
+func AvatarByUserName(ctx *context.Context) {
+ userName := ctx.Params(":username")
+ size := int(ctx.ParamsInt64(":size"))
+
+ var user *user_model.User
+ if strings.ToLower(userName) != user_model.GhostUserLowerName {
+ var err error
+ if user, err = user_model.GetUserByName(ctx, userName); err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.NotFound("GetUserByName", err)
+ return
+ }
+ ctx.ServerError("Invalid user: "+userName, err)
+ return
+ }
+ } else {
+ user = user_model.NewGhostUser()
+ }
+
+ cacheableRedirect(ctx, user.AvatarLinkWithSize(ctx, size))
+}
+
+// AvatarByEmailHash redirects the browser to the email avatar link
+func AvatarByEmailHash(ctx *context.Context) {
+ hash := ctx.Params(":hash")
+ email, err := avatars.GetEmailForHash(ctx, hash)
+ if err != nil {
+ ctx.ServerError("invalid avatar hash: "+hash, err)
+ return
+ }
+ size := ctx.FormInt("size")
+ cacheableRedirect(ctx, avatars.GenerateEmailAvatarFinalLink(ctx, email, size))
+}
diff --git a/routers/web/user/code.go b/routers/web/user/code.go
new file mode 100644
index 0000000..e2e8f25
--- /dev/null
+++ b/routers/web/user/code.go
@@ -0,0 +1,129 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/base"
+ code_indexer "code.gitea.io/gitea/modules/indexer/code"
+ "code.gitea.io/gitea/modules/setting"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplUserCode base.TplName = "user/code"
+)
+
+// CodeSearch renders the user/organization code search page
+func CodeSearch(ctx *context.Context) {
+ if !setting.Indexer.RepoIndexerEnabled {
+ ctx.Redirect(ctx.ContextUser.HomeLink())
+ return
+ }
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ shared_user.RenderUserHeader(ctx)
+
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
+ ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["Title"] = ctx.Tr("explore.code")
+
+ language := ctx.FormTrim("l")
+ keyword := ctx.FormTrim("q")
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Language"] = language
+ ctx.Data["IsFuzzy"] = isFuzzy
+ ctx.Data["IsCodePage"] = true
+
+ if keyword == "" {
+ ctx.HTML(http.StatusOK, tplUserCode)
+ return
+ }
+
+ var (
+ repoIDs []int64
+ err error
+ )
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ repoIDs, err = repo_model.FindUserCodeAccessibleOwnerRepoIDs(ctx, ctx.ContextUser.ID, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("FindUserCodeAccessibleOwnerRepoIDs", err)
+ return
+ }
+
+ var (
+ total int
+ searchResults []*code_indexer.Result
+ searchResultLanguages []*code_indexer.SearchResultLanguages
+ )
+
+ if len(repoIDs) > 0 {
+ total, searchResults, searchResultLanguages, err = code_indexer.PerformSearch(ctx, &code_indexer.SearchOptions{
+ RepoIDs: repoIDs,
+ Keyword: keyword,
+ IsKeywordFuzzy: isFuzzy,
+ Language: language,
+ Paginator: &db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.RepoSearchPagingNum,
+ },
+ })
+ if err != nil {
+ if code_indexer.IsAvailable(ctx) {
+ ctx.ServerError("SearchResults", err)
+ return
+ }
+ ctx.Data["CodeIndexerUnavailable"] = true
+ } else {
+ ctx.Data["CodeIndexerUnavailable"] = !code_indexer.IsAvailable(ctx)
+ }
+
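+		// Collect the distinct repository IDs from the search results so their metadata can be loaded in one query.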
+ loadRepoIDs := make([]int64, 0, len(searchResults))
+ for _, result := range searchResults {
+ var find bool
+ for _, id := range loadRepoIDs {
+ if id == result.RepoID {
+ find = true
+ break
+ }
+ }
+ if !find {
+ loadRepoIDs = append(loadRepoIDs, result.RepoID)
+ }
+ }
+
+ repoMaps, err := repo_model.GetRepositoriesMapByIDs(ctx, loadRepoIDs)
+ if err != nil {
+ ctx.ServerError("GetRepositoriesMapByIDs", err)
+ return
+ }
+
+ ctx.Data["RepoMaps"] = repoMaps
+ }
+ ctx.Data["SearchResults"] = searchResults
+ ctx.Data["SearchResultLanguages"] = searchResultLanguages
+
+ pager := context.NewPagination(total, setting.UI.RepoSearchPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "l", "Language")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplUserCode)
+}
diff --git a/routers/web/user/home.go b/routers/web/user/home.go
new file mode 100644
index 0000000..4b249e9
--- /dev/null
+++ b/routers/web/user/home.go
@@ -0,0 +1,883 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "bytes"
+ "fmt"
+ "net/http"
+ "regexp"
+ "slices"
+ "sort"
+ "strconv"
+ "strings"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ issue_indexer "code.gitea.io/gitea/modules/indexer/issues"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ pull_service "code.gitea.io/gitea/services/pull"
+
+ "github.com/ProtonMail/go-crypto/openpgp"
+ "github.com/ProtonMail/go-crypto/openpgp/armor"
+ "xorm.io/builder"
+)
+
+const (
+ tplDashboard base.TplName = "user/dashboard/dashboard"
+ tplIssues base.TplName = "user/dashboard/issues"
+ tplMilestones base.TplName = "user/dashboard/milestones"
+ tplProfile base.TplName = "user/profile"
+)
+
+// getDashboardContextUser determines which user's dashboard is being viewed: the signed-in user or an organization.
+func getDashboardContextUser(ctx *context.Context) *user_model.User {
+ ctxUser := ctx.Doer
+ orgName := ctx.Params(":org")
+ if len(orgName) > 0 {
+ ctxUser = ctx.Org.Organization.AsUser()
+ ctx.Data["Teams"] = ctx.Org.Teams
+ }
+ ctx.Data["ContextUser"] = ctxUser
+
+ orgs, err := organization.GetUserOrgsList(ctx, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserOrgsList", err)
+ return nil
+ }
+ ctx.Data["Orgs"] = orgs
+
+ return ctxUser
+}
+
+// Dashboard renders the dashboard page
+func Dashboard(ctx *context.Context) {
+ ctxUser := getDashboardContextUser(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ var (
+ date = ctx.FormString("date")
+ page = ctx.FormInt("page")
+ )
+
+ // Make sure page number is at least 1. Will be posted to ctx.Data.
+ if page <= 1 {
+ page = 1
+ }
+
+ ctx.Data["Title"] = ctxUser.DisplayName() + " - " + ctx.Locale.TrString("dashboard")
+ ctx.Data["PageIsDashboard"] = true
+ ctx.Data["PageIsNews"] = true
+ cnt, _ := organization.GetOrganizationCount(ctx, ctxUser)
+ ctx.Data["UserOrgsCount"] = cnt
+ ctx.Data["MirrorsEnabled"] = setting.Mirror.Enabled
+ ctx.Data["Date"] = date
+
+ var uid int64
+ if ctxUser != nil {
+ uid = ctxUser.ID
+ }
+
+ ctx.PageData["dashboardRepoList"] = map[string]any{
+ "searchLimit": setting.UI.User.RepoPagingNum,
+ "uid": uid,
+ }
+
+ if setting.Service.EnableUserHeatmap {
+ data, err := activities_model.GetUserHeatmapDataByUserTeam(ctx, ctxUser, ctx.Org.Team, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserHeatmapDataByUserTeam", err)
+ return
+ }
+ ctx.Data["HeatmapData"] = data
+ ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
+ }
+
+ feeds, count, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
+ RequestedUser: ctxUser,
+ RequestedTeam: ctx.Org.Team,
+ Actor: ctx.Doer,
+ IncludePrivate: true,
+ OnlyPerformedBy: false,
+ IncludeDeleted: false,
+ Date: ctx.FormString("date"),
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.FeedPagingNum,
+ },
+ })
+ if err != nil {
+ ctx.ServerError("GetFeeds", err)
+ return
+ }
+
+ ctx.Data["Feeds"] = feeds
+
+ pager := context.NewPagination(int(count), setting.UI.FeedPagingNum, page, 5)
+ pager.AddParam(ctx, "date", "Date")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplDashboard)
+}
+
+// Milestones renders the user milestones page
+func Milestones(ctx *context.Context) {
+ if unit.TypeIssues.UnitGlobalDisabled() && unit.TypePullRequests.UnitGlobalDisabled() {
+ log.Debug("Milestones overview page not available as both issues and pull requests are globally disabled")
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("milestones")
+ ctx.Data["PageIsMilestonesDashboard"] = true
+
+ ctxUser := getDashboardContextUser(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ repoOpts := repo_model.SearchRepoOptions{
+ Actor: ctx.Doer,
+ OwnerID: ctxUser.ID,
+ Private: true,
+		AllPublic:     false, // Do not also include all public repositories of users and public organisations
+		AllLimited:    false, // Do not also include all public repositories of limited organisations
+ Archived: optional.Some(false),
+		HasMilestones: optional.Some(true), // Only display repositories that have milestones
+ }
+
+ if ctxUser.IsOrganization() && ctx.Org.Team != nil {
+ repoOpts.TeamID = ctx.Org.Team.ID
+ }
+
+ var (
+ userRepoCond = repo_model.SearchRepositoryCondition(&repoOpts) // all repo condition user could visit
+ repoCond = userRepoCond
+ repoIDs []int64
+
+ reposQuery = ctx.FormString("repos")
+ isShowClosed = ctx.FormString("state") == "closed"
+ sortType = ctx.FormString("sort")
+ page = ctx.FormInt("page")
+ keyword = ctx.FormTrim("q")
+ )
+
+ if page <= 1 {
+ page = 1
+ }
+
+ if len(reposQuery) != 0 {
+ if issueReposQueryPattern.MatchString(reposQuery) {
+ // remove "[" and "]" from string
+ reposQuery = reposQuery[1 : len(reposQuery)-1]
+			// parse each comma-separated ID and append it to repoIDs
+
+ for _, rID := range strings.Split(reposQuery, ",") {
+ // Ensure nonempty string entries
+ if rID != "" && rID != "0" {
+ rIDint64, err := strconv.ParseInt(rID, 10, 64)
+ // If the repo id specified by query is not parseable or not accessible by user, just ignore it.
+ if err == nil {
+ repoIDs = append(repoIDs, rIDint64)
+ }
+ }
+ }
+ if len(repoIDs) > 0 {
+				// Don't just set repoCond = builder.In("id", repoIDs), because the user may have no permission on some of those repoIDs.
+				// Instead, intersect the requested IDs with the permission-checked userRepoCond.
+ repoCond = repoCond.And(builder.In("id", repoIDs))
+ }
+ } else {
+			log.Warn("issueReposQueryPattern did not match the repos query")
+ }
+ }
+
+ counts, err := issues_model.CountMilestonesMap(ctx, issues_model.FindMilestoneOptions{
+ RepoCond: userRepoCond,
+ Name: keyword,
+ IsClosed: optional.Some(isShowClosed),
+ })
+ if err != nil {
+ ctx.ServerError("CountMilestonesByRepoIDs", err)
+ return
+ }
+
+ milestones, err := db.Find[issues_model.Milestone](ctx, issues_model.FindMilestoneOptions{
+ ListOptions: db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ },
+ RepoCond: repoCond,
+ IsClosed: optional.Some(isShowClosed),
+ SortType: sortType,
+ Name: keyword,
+ })
+ if err != nil {
+ ctx.ServerError("SearchMilestones", err)
+ return
+ }
+
+ showRepos, _, err := repo_model.SearchRepositoryByCondition(ctx, &repoOpts, userRepoCond, false)
+ if err != nil {
+ ctx.ServerError("SearchRepositoryByCondition", err)
+ return
+ }
+ sort.Sort(showRepos)
+
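+	// Attach each milestone to its repository; drop milestones whose repository is not in showRepos.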
+ for i := 0; i < len(milestones); {
+ for _, repo := range showRepos {
+ if milestones[i].RepoID == repo.ID {
+ milestones[i].Repo = repo
+ break
+ }
+ }
+ if milestones[i].Repo == nil {
+ log.Warn("Cannot find milestone %d 's repository %d", milestones[i].ID, milestones[i].RepoID)
+ milestones = append(milestones[:i], milestones[i+1:]...)
+ continue
+ }
+
+ milestones[i].RenderedContent, err = markdown.RenderString(&markup.RenderContext{
+ Links: markup.Links{
+ Base: milestones[i].Repo.Link(),
+ },
+ Metas: milestones[i].Repo.ComposeMetas(ctx),
+ Ctx: ctx,
+ }, milestones[i].Content)
+ if err != nil {
+ ctx.ServerError("RenderString", err)
+ return
+ }
+
+ if milestones[i].Repo.IsTimetrackerEnabled(ctx) {
+ err := milestones[i].LoadTotalTrackedTime(ctx)
+ if err != nil {
+ ctx.ServerError("LoadTotalTrackedTime", err)
+ return
+ }
+ }
+ i++
+ }
+
+ milestoneStats, err := issues_model.GetMilestonesStatsByRepoCondAndKw(ctx, repoCond, keyword)
+ if err != nil {
+ ctx.ServerError("GetMilestoneStats", err)
+ return
+ }
+
+ var totalMilestoneStats *issues_model.MilestonesStats
+ if len(repoIDs) == 0 {
+ totalMilestoneStats = milestoneStats
+ } else {
+ totalMilestoneStats, err = issues_model.GetMilestonesStatsByRepoCondAndKw(ctx, userRepoCond, keyword)
+ if err != nil {
+ ctx.ServerError("GetMilestoneStats", err)
+ return
+ }
+ }
+
+ showRepoIDs := make(container.Set[int64], len(showRepos))
+ for _, repo := range showRepos {
+ if repo.ID > 0 {
+ showRepoIDs.Add(repo.ID)
+ }
+ }
+ if len(repoIDs) == 0 {
+ repoIDs = showRepoIDs.Values()
+ }
+ repoIDs = slices.DeleteFunc(repoIDs, func(v int64) bool {
+ return !showRepoIDs.Contains(v)
+ })
+
+ var pagerCount int
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ ctx.Data["Total"] = totalMilestoneStats.ClosedCount
+ pagerCount = int(milestoneStats.ClosedCount)
+ } else {
+ ctx.Data["State"] = "open"
+ ctx.Data["Total"] = totalMilestoneStats.OpenCount
+ pagerCount = int(milestoneStats.OpenCount)
+ }
+
+ ctx.Data["Milestones"] = milestones
+ ctx.Data["Repos"] = showRepos
+ ctx.Data["Counts"] = counts
+ ctx.Data["MilestoneStats"] = milestoneStats
+ ctx.Data["SortType"] = sortType
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["RepoIDs"] = repoIDs
+ ctx.Data["IsShowClosed"] = isShowClosed
+
+ pager := context.NewPagination(pagerCount, setting.UI.IssuePagingNum, page, 5)
+ pager.AddParam(ctx, "q", "Keyword")
+ pager.AddParam(ctx, "repos", "RepoIDs")
+ pager.AddParam(ctx, "sort", "SortType")
+ pager.AddParam(ctx, "state", "State")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplMilestones)
+}
+
+// Pulls renders the user's pull request overview page
+func Pulls(ctx *context.Context) {
+ if unit.TypePullRequests.UnitGlobalDisabled() {
+ log.Debug("Pull request overview page not available as it is globally disabled.")
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("pull_requests")
+ ctx.Data["PageIsPulls"] = true
+ buildIssueOverview(ctx, unit.TypePullRequests)
+}
+
+// Issues renders the user's issues overview page
+func Issues(ctx *context.Context) {
+ if unit.TypeIssues.UnitGlobalDisabled() {
+ log.Debug("Issues overview page not available as it is globally disabled.")
+ ctx.Status(http.StatusNotFound)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("issues")
+ ctx.Data["PageIsIssues"] = true
+ buildIssueOverview(ctx, unit.TypeIssues)
+}
+
+// Regexp for repos query
+var issueReposQueryPattern = regexp.MustCompile(`^\[\d+(,\d+)*,?\]$`)
+
+func buildIssueOverview(ctx *context.Context, unitType unit.Type) {
+ // ----------------------------------------------------
+ // Determine user; can be either user or organization.
+ // Return with NotFound or ServerError if unsuccessful.
+ // ----------------------------------------------------
+
+ ctxUser := getDashboardContextUser(ctx)
+ if ctx.Written() {
+ return
+ }
+
+ var (
+ viewType string
+ sortType = ctx.FormString("sort")
+ filterMode int
+ )
+
+ // Default to recently updated, unlike repository issues list
+ if sortType == "" {
+ sortType = "recentupdate"
+ }
+
+ // --------------------------------------------------------------------------------
+ // Distinguish User from Organization.
+ // Org:
+ // - Remember pre-determined viewType string for later. Will be posted to ctx.Data.
+ // Organization does not have view type and filter mode.
+ // User:
+ // - Use ctx.FormString("type") to determine filterMode.
+ // The type is set when clicking for example "assigned to me" on the overview page.
+ // - Remember either this or a fallback. Will be posted to ctx.Data.
+ // --------------------------------------------------------------------------------
+
+ // TODO: distinguish during routing
+
+ viewType = ctx.FormString("type")
+ switch viewType {
+ case "assigned":
+ filterMode = issues_model.FilterModeAssign
+ case "mentioned":
+ filterMode = issues_model.FilterModeMention
+ case "review_requested":
+ filterMode = issues_model.FilterModeReviewRequested
+ case "reviewed_by":
+ filterMode = issues_model.FilterModeReviewed
+ case "your_repositories":
+ filterMode = issues_model.FilterModeYourRepositories
+ case "created_by":
+ fallthrough
+ default:
+ filterMode = issues_model.FilterModeCreate
+ viewType = "created_by"
+ }
+
+ // --------------------------------------------------------------------------
+ // Build opts (IssuesOptions), which contains filter information.
+ // Will eventually be used to retrieve issues relevant for the overview page.
+ // Note: Non-final states of opts are used in-between, namely for:
+ // - Keyword search
+ // - Count Issues by repo
+ // --------------------------------------------------------------------------
+
+ // Get repository IDs where User/Org/Team has access.
+ var team *organization.Team
+ var org *organization.Organization
+ if ctx.Org != nil {
+ org = ctx.Org.Organization
+ team = ctx.Org.Team
+ }
+
+ isPullList := unitType == unit.TypePullRequests
+ opts := &issues_model.IssuesOptions{
+ IsPull: optional.Some(isPullList),
+ SortType: sortType,
+ IsArchived: optional.Some(false),
+ Org: org,
+ Team: team,
+ User: ctx.Doer,
+ }
+
+ isFuzzy := ctx.FormOptionalBool("fuzzy").ValueOrDefault(true)
+
+	// Search all repositories that are accessible:
+	//
+	// As user:
+	// - The user owns the repository.
+	// - The user has collaborator permissions in the repository.
+	//
+	// As org:
+	// - The org owns the repository.
+	//
+	// As team:
+	// - The team's org owns the repository.
+	// - The team has read permission on the repository.
+ repoOpts := &repo_model.SearchRepoOptions{
+ Actor: ctx.Doer,
+ OwnerID: ctxUser.ID,
+ Private: true,
+ AllPublic: false,
+ AllLimited: false,
+ Collaborate: optional.None[bool](),
+ UnitType: unitType,
+ Archived: optional.Some(false),
+ }
+ if team != nil {
+ repoOpts.TeamID = team.ID
+ }
+ accessibleRepos := container.Set[int64]{}
+ {
+ ids, _, err := repo_model.SearchRepositoryIDs(ctx, repoOpts)
+ if err != nil {
+ ctx.ServerError("SearchRepositoryIDs", err)
+ return
+ }
+ accessibleRepos.AddMultiple(ids...)
+ opts.RepoIDs = ids
+ if len(opts.RepoIDs) == 0 {
+ // no repos found, don't let the indexer return all repos
+ opts.RepoIDs = []int64{0}
+ }
+ }
+ if ctx.Doer.ID == ctxUser.ID && filterMode != issues_model.FilterModeYourRepositories {
+ // If the doer is the same as the context user, which means the doer is viewing his own dashboard,
+ // it's not enough to show the repos that the doer owns or has been explicitly granted access to,
+ // because the doer may create issues or be mentioned in any public repo.
+ // So we need search issues in all public repos.
+ opts.AllPublic = true
+ }
+
+ switch filterMode {
+ case issues_model.FilterModeAll:
+ case issues_model.FilterModeYourRepositories:
+ case issues_model.FilterModeAssign:
+ opts.AssigneeID = ctx.Doer.ID
+ case issues_model.FilterModeCreate:
+ opts.PosterID = ctx.Doer.ID
+ case issues_model.FilterModeMention:
+ opts.MentionedID = ctx.Doer.ID
+ case issues_model.FilterModeReviewRequested:
+ opts.ReviewRequestedID = ctx.Doer.ID
+ case issues_model.FilterModeReviewed:
+ opts.ReviewedID = ctx.Doer.ID
+ }
+
+ // keyword holds the search term entered into the search field.
+ keyword := strings.Trim(ctx.FormString("q"), " ")
+ ctx.Data["Keyword"] = keyword
+
+	// Whether to show closed issues is determined by the "state" form value.
+ isShowClosed := ctx.FormString("state") == "closed"
+ opts.IsClosed = optional.Some(isShowClosed)
+
+ // Make sure page number is at least 1. Will be posted to ctx.Data.
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ opts.Paginator = &db.ListOptions{
+ Page: page,
+ PageSize: setting.UI.IssuePagingNum,
+ }
+
+ // Get IDs for labels (a filter option for issues/pulls).
+ // Required for IssuesOptions.
+ selectedLabels := ctx.FormString("labels")
+ if len(selectedLabels) > 0 && selectedLabels != "0" {
+ var err error
+ opts.LabelIDs, err = base.StringsToInt64s(strings.Split(selectedLabels, ","))
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("invalid_data", selectedLabels), true)
+ }
+ }
+
+ if org != nil {
+ // Get Org Labels
+ labels, err := issues_model.GetLabelsByOrgID(ctx, ctx.Org.Organization.ID, ctx.FormString("sort"), db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("GetLabelsByOrgID", err)
+ return
+ }
+
+ // Get the exclusive scope for every label ID
+ labelExclusiveScopes := make([]string, 0, len(opts.LabelIDs))
+ for _, labelID := range opts.LabelIDs {
+ foundExclusiveScope := false
+ for _, label := range labels {
+ if label.ID == labelID || label.ID == -labelID {
+ labelExclusiveScopes = append(labelExclusiveScopes, label.ExclusiveScope())
+ foundExclusiveScope = true
+ break
+ }
+ }
+ if !foundExclusiveScope {
+ labelExclusiveScopes = append(labelExclusiveScopes, "")
+ }
+ }
+
+ for _, l := range labels {
+ l.LoadSelectedLabelsAfterClick(opts.LabelIDs, labelExclusiveScopes)
+ }
+ ctx.Data["Labels"] = labels
+ }
+
+ // ------------------------------
+ // Get issues as defined by opts.
+ // ------------------------------
+
+ // Slice of Issues that will be displayed on the overview page
+ // USING FINAL STATE OF opts FOR A QUERY.
+ var issues issues_model.IssueList
+ {
+ issueIDs, _, err := issue_indexer.SearchIssues(ctx, issue_indexer.ToSearchOptions(keyword, opts).Copy(
+ func(o *issue_indexer.SearchOptions) { o.IsFuzzyKeyword = isFuzzy },
+ ))
+ if err != nil {
+ ctx.ServerError("issueIDsFromSearch", err)
+ return
+ }
+ issues, err = issues_model.GetIssuesByIDs(ctx, issueIDs, true)
+ if err != nil {
+ ctx.ServerError("GetIssuesByIDs", err)
+ return
+ }
+ }
+
+ commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues)
+ if err != nil {
+ ctx.ServerError("GetIssuesLastCommitStatus", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+
+ // -------------------------------
+ // Fill stats to post to ctx.Data.
+ // -------------------------------
+ issueStats, err := getUserIssueStats(ctx, ctxUser, filterMode, issue_indexer.ToSearchOptions(keyword, opts).Copy(
+ func(o *issue_indexer.SearchOptions) { o.IsFuzzyKeyword = isFuzzy },
+ ))
+ if err != nil {
+ ctx.ServerError("getUserIssueStats", err)
+ return
+ }
+
+ // Will be posted to ctx.Data.
+ var shownIssues int
+ if !isShowClosed {
+ shownIssues = int(issueStats.OpenCount)
+ } else {
+ shownIssues = int(issueStats.ClosedCount)
+ }
+
+ ctx.Data["IsShowClosed"] = isShowClosed
+
+ ctx.Data["IssueRefEndNames"], ctx.Data["IssueRefURLs"] = issue_service.GetRefEndNamesAndURLs(issues, ctx.FormString("RepoLink"))
+
+ if err := issues.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("issues.LoadAttributes", err)
+ return
+ }
+ ctx.Data["Issues"] = issues
+
+ approvalCounts, err := issues.GetApprovalCounts(ctx)
+ if err != nil {
+ ctx.ServerError("ApprovalCounts", err)
+ return
+ }
+ ctx.Data["ApprovalCounts"] = func(issueID int64, typ string) int64 {
+ counts, ok := approvalCounts[issueID]
+ if !ok || len(counts) == 0 {
+ return 0
+ }
+ reviewTyp := issues_model.ReviewTypeApprove
+ if typ == "reject" {
+ reviewTyp = issues_model.ReviewTypeReject
+ } else if typ == "waiting" {
+ reviewTyp = issues_model.ReviewTypeRequest
+ }
+ for _, count := range counts {
+ if count.Type == reviewTyp {
+ return count.Count
+ }
+ }
+ return 0
+ }
+ ctx.Data["CommitLastStatus"] = lastStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+ ctx.Data["IssueStats"] = issueStats
+ ctx.Data["ViewType"] = viewType
+ ctx.Data["SortType"] = sortType
+ ctx.Data["IsShowClosed"] = isShowClosed
+ ctx.Data["SelectLabels"] = selectedLabels
+ ctx.Data["PageIsOrgIssues"] = org != nil
+ ctx.Data["IsFuzzy"] = isFuzzy
+
+ if isShowClosed {
+ ctx.Data["State"] = "closed"
+ } else {
+ ctx.Data["State"] = "open"
+ }
+
+ pager := context.NewPagination(shownIssues, setting.UI.IssuePagingNum, page, 5)
+ pager.AddParam(ctx, "q", "Keyword")
+ pager.AddParam(ctx, "type", "ViewType")
+ pager.AddParam(ctx, "sort", "SortType")
+ pager.AddParam(ctx, "state", "State")
+ pager.AddParam(ctx, "labels", "SelectLabels")
+ pager.AddParam(ctx, "milestone", "MilestoneID")
+ pager.AddParam(ctx, "assignee", "AssigneeID")
+ pager.AddParam(ctx, "fuzzy", "IsFuzzy")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplIssues)
+}
+
+// ShowSSHKeys outputs all the SSH keys of the requested user
+func ShowSSHKeys(ctx *context.Context) {
+ keys, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ OwnerID: ctx.ContextUser.ID,
+ })
+ if err != nil {
+ ctx.ServerError("ListPublicKeys", err)
+ return
+ }
+
+ var buf bytes.Buffer
+ for i := range keys {
+ buf.WriteString(keys[i].OmitEmail())
+ buf.WriteString("\n")
+ }
+ ctx.PlainTextBytes(http.StatusOK, buf.Bytes())
+}
+
+// ShowGPGKeys outputs all the public GPG keys of the requested user
+func ShowGPGKeys(ctx *context.Context) {
+ keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: ctx.ContextUser.ID,
+ })
+ if err != nil {
+ ctx.ServerError("ListGPGKeys", err)
+ return
+ }
+
+ entities := make([]*openpgp.Entity, 0)
+ failedEntitiesID := make([]string, 0)
+ for _, k := range keys {
+ e, err := asymkey_model.GPGKeyToEntity(ctx, k)
+ if err != nil {
+ if asymkey_model.IsErrGPGKeyImportNotExist(err) {
+ failedEntitiesID = append(failedEntitiesID, k.KeyID)
+				continue // Skip keys that were imported without keeping a backup of the armored key; they cannot be re-exported
+ }
+ ctx.ServerError("ShowGPGKeys", err)
+ return
+ }
+ entities = append(entities, e)
+ }
+ var buf bytes.Buffer
+
+ headers := make(map[string]string)
+	if len(failedEntitiesID) > 0 { // Some keys need to be re-imported before they can be exported
+		headers["Note"] = fmt.Sprintf("The keys with the following IDs couldn't be exported and need to be reuploaded: %s", strings.Join(failedEntitiesID, ", "))
+ } else if len(entities) == 0 {
+ headers["Note"] = "This user hasn't uploaded any GPG keys."
+ }
+ writer, _ := armor.Encode(&buf, "PGP PUBLIC KEY BLOCK", headers)
+ for _, e := range entities {
+		err = e.Serialize(writer) // TODO: find out why keys are exported with a different cipherTypeByte than the original (should not be blocking, but strange)
+ if err != nil {
+ ctx.ServerError("ShowGPGKeys", err)
+ return
+ }
+ }
+ writer.Close()
+ ctx.PlainTextBytes(http.StatusOK, buf.Bytes())
+}
+
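+// UsernameSubRoute dispatches /{username} sub-routes (.png, .keys, .gpg, .rss, .atom) to their handlers, or renders the profile page.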
+func UsernameSubRoute(ctx *context.Context) {
+ // WORKAROUND to support usernames with "." in it
+ // https://github.com/go-chi/chi/issues/781
+ username := ctx.Params("username")
+ reloadParam := func(suffix string) (success bool) {
+ ctx.SetParams("username", strings.TrimSuffix(username, suffix))
+ context.UserAssignmentWeb()(ctx)
+ if ctx.Written() {
+ return false
+ }
+ // check view permissions
+ if !user_model.IsUserVisibleToViewer(ctx, ctx.ContextUser, ctx.Doer) {
+ ctx.NotFound("User not visible", nil)
+ return false
+ }
+ return true
+ }
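+ // Dispatch on the path suffix; each branch strips the suffix and re-resolves the user before serving.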
+ switch {
+ case strings.HasSuffix(username, ".png"):
+ if reloadParam(".png") {
+ AvatarByUserName(ctx)
+ }
+ case strings.HasSuffix(username, ".keys"):
+ if reloadParam(".keys") {
+ ShowSSHKeys(ctx)
+ }
+ case strings.HasSuffix(username, ".gpg"):
+ if reloadParam(".gpg") {
+ ShowGPGKeys(ctx)
+ }
+ case strings.HasSuffix(username, ".rss"):
+ if !setting.Other.EnableFeed {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if reloadParam(".rss") {
+ feed.ShowUserFeedRSS(ctx)
+ }
+ case strings.HasSuffix(username, ".atom"):
+ if !setting.Other.EnableFeed {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ if reloadParam(".atom") {
+ feed.ShowUserFeedAtom(ctx)
+ }
+ default:
+ context.UserAssignmentWeb()(ctx)
+ if !ctx.Written() {
+ ctx.Data["EnableFeed"] = setting.Other.EnableFeed
+ OwnerProfile(ctx)
+ }
+ }
+}
+
+func getUserIssueStats(ctx *context.Context, ctxUser *user_model.User, filterMode int, opts *issue_indexer.SearchOptions) (*issues_model.IssueStats, error) {
+ doerID := ctx.Doer.ID
+
+ opts = opts.Copy(func(o *issue_indexer.SearchOptions) {
+ // If the doer is the same as the context user, which means the doer is viewing their own dashboard,
+ // it's not enough to show the repos that the doer owns or has been explicitly granted access to,
+ // because the doer may create issues or be mentioned in any public repo.
+ // So we need to search issues in all public repos.
+ o.AllPublic = doerID == ctxUser.ID
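+ // Clear the filter-specific fields; each count below applies its own filter.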
+ o.AssigneeID = nil
+ o.PosterID = nil
+ o.MentionID = nil
+ o.ReviewRequestedID = nil
+ o.ReviewedID = nil
+ })
+
+ var (
+ err error
+ ret = &issues_model.IssueStats{}
+ )
+
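+ // The open/closed counts respect the currently selected filter mode.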
+ {
+ openClosedOpts := opts.Copy()
+ switch filterMode {
+ case issues_model.FilterModeAll:
+ // no-op
+ case issues_model.FilterModeYourRepositories:
+ openClosedOpts.AllPublic = false
+ case issues_model.FilterModeAssign:
+ openClosedOpts.AssigneeID = optional.Some(doerID)
+ case issues_model.FilterModeCreate:
+ openClosedOpts.PosterID = optional.Some(doerID)
+ case issues_model.FilterModeMention:
+ openClosedOpts.MentionID = optional.Some(doerID)
+ case issues_model.FilterModeReviewRequested:
+ openClosedOpts.ReviewRequestedID = optional.Some(doerID)
+ case issues_model.FilterModeReviewed:
+ openClosedOpts.ReviewedID = optional.Some(doerID)
+ }
+ openClosedOpts.IsClosed = optional.Some(false)
+ ret.OpenCount, err = issue_indexer.CountIssues(ctx, openClosedOpts)
+ if err != nil {
+ return nil, err
+ }
+ openClosedOpts.IsClosed = optional.Some(true)
+ ret.ClosedCount, err = issue_indexer.CountIssues(ctx, openClosedOpts)
+ if err != nil {
+ return nil, err
+ }
+ }
+
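+ // The per-tab counts are computed independently of the selected filter mode.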
+ ret.YourRepositoriesCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.AllPublic = false }))
+ if err != nil {
+ return nil, err
+ }
+ ret.AssignCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.AssigneeID = optional.Some(doerID) }))
+ if err != nil {
+ return nil, err
+ }
+ ret.CreateCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.PosterID = optional.Some(doerID) }))
+ if err != nil {
+ return nil, err
+ }
+ ret.MentionCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.MentionID = optional.Some(doerID) }))
+ if err != nil {
+ return nil, err
+ }
+ ret.ReviewRequestedCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.ReviewRequestedID = optional.Some(doerID) }))
+ if err != nil {
+ return nil, err
+ }
+ ret.ReviewedCount, err = issue_indexer.CountIssues(ctx, opts.Copy(func(o *issue_indexer.SearchOptions) { o.ReviewedID = optional.Some(doerID) }))
+ if err != nil {
+ return nil, err
+ }
+ return ret, nil
+}
diff --git a/routers/web/user/home_test.go b/routers/web/user/home_test.go
new file mode 100644
index 0000000..e1c8ca9
--- /dev/null
+++ b/routers/web/user/home_test.go
@@ -0,0 +1,169 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestArchivedIssues(t *testing.T) {
+ // Arrange
+ setting.UI.IssuePagingNum = 1
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "issues")
+ contexttest.LoadUser(t, ctx, 30)
+ ctx.Req.Form.Set("state", "open")
+ ctx.Req.Form.Set("type", "your_repositories")
+
+ // Assume: User 30 has access to three repos; two of them have issues, and one of those two (repo 51) is archived.
+ repos, _, _ := repo_model.GetUserRepositories(db.DefaultContext, &repo_model.SearchRepoOptions{Actor: ctx.Doer})
+ assert.Len(t, repos, 3)
+ IsArchived := make(map[int64]bool)
+ NumIssues := make(map[int64]int)
+ for _, repo := range repos {
+ IsArchived[repo.ID] = repo.IsArchived
+ NumIssues[repo.ID] = repo.NumIssues
+ }
+ assert.False(t, IsArchived[50])
+ assert.EqualValues(t, 1, NumIssues[50])
+ assert.True(t, IsArchived[51])
+ assert.EqualValues(t, 1, NumIssues[51])
+
+ // Act
+ Issues(ctx)
+
+ // Assert: One Issue (ID 30) from one Repo (ID 50) is retrieved, while nothing from archived Repo 51 is retrieved
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+
+ assert.Len(t, ctx.Data["Issues"], 1)
+}
+
+func TestIssues(t *testing.T) {
+ setting.UI.IssuePagingNum = 1
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "issues")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.Req.Form.Set("state", "closed")
+ Issues(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+
+ assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
+ assert.Len(t, ctx.Data["Issues"], 1)
+}
+
+func TestPulls(t *testing.T) {
+ setting.UI.IssuePagingNum = 20
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "pulls")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.Req.Form.Set("state", "open")
+ ctx.Req.Form.Set("type", "your_repositories")
+ Pulls(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+
+ assert.Len(t, ctx.Data["Issues"], 5)
+}
+
+func TestMilestones(t *testing.T) {
+ setting.UI.IssuePagingNum = 1
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "milestones")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.SetParams("sort", "issues")
+ ctx.Req.Form.Set("state", "closed")
+ ctx.Req.Form.Set("sort", "furthestduedate")
+ Milestones(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, map[int64]int64{1: 1}, ctx.Data["Counts"])
+ assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
+ assert.EqualValues(t, "furthestduedate", ctx.Data["SortType"])
+ assert.EqualValues(t, 1, ctx.Data["Total"])
+ assert.Len(t, ctx.Data["Milestones"], 1)
+ assert.Len(t, ctx.Data["Repos"], 2) // both repo 42 and 1 have milestones and both are owned by user 2
+}
+
+func TestMilestonesForSpecificRepo(t *testing.T) {
+ setting.UI.IssuePagingNum = 1
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "milestones")
+ contexttest.LoadUser(t, ctx, 2)
+ ctx.SetParams("sort", "issues")
+ ctx.SetParams("repo", "1")
+ ctx.Req.Form.Set("state", "closed")
+ ctx.Req.Form.Set("sort", "furthestduedate")
+ Milestones(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.EqualValues(t, map[int64]int64{1: 1}, ctx.Data["Counts"])
+ assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
+ assert.EqualValues(t, "furthestduedate", ctx.Data["SortType"])
+ assert.EqualValues(t, 1, ctx.Data["Total"])
+ assert.Len(t, ctx.Data["Milestones"], 1)
+ assert.Len(t, ctx.Data["Repos"], 2) // both repo 42 and 1 have milestones and both are owned by user 2
+}
+
+func TestDashboardPagination(t *testing.T) {
+ ctx, _ := contexttest.MockContext(t, "/", contexttest.MockContextOption{Render: templates.HTMLRenderer()})
+ page := context.NewPagination(10, 3, 1, 3)
+
+ setting.AppSubURL = "/SubPath"
+ out, err := ctx.RenderToHTML("base/paginate", map[string]any{"Link": setting.AppSubURL, "Page": page})
+ require.NoError(t, err)
+ assert.Contains(t, out, `<a class=" item navigation" href="/SubPath/?page=2">`)
+
+ setting.AppSubURL = ""
+ out, err = ctx.RenderToHTML("base/paginate", map[string]any{"Link": setting.AppSubURL, "Page": page})
+ require.NoError(t, err)
+ assert.Contains(t, out, `<a class=" item navigation" href="/?page=2">`)
+}
+
+func TestOrgLabels(t *testing.T) {
+ require.NoError(t, unittest.LoadFixtures())
+
+ ctx, _ := contexttest.MockContext(t, "org/org3/issues")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadOrganization(t, ctx, 3)
+ Issues(ctx)
+ assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+
+ assert.True(t, ctx.Data["PageIsOrgIssues"].(bool))
+
+ orgLabels := []struct {
+ ID int64
+ OrgID int64
+ Name string
+ }{
+ {3, 3, "orglabel3"},
+ {4, 3, "orglabel4"},
+ }
+
+ labels, ok := ctx.Data["Labels"].([]*issues_model.Label)
+
+ assert.True(t, ok)
+
+ if assert.Len(t, labels, len(orgLabels)) {
+ for i, label := range labels {
+ assert.EqualValues(t, orgLabels[i].OrgID, label.OrgID)
+ assert.EqualValues(t, orgLabels[i].ID, label.ID)
+ assert.EqualValues(t, orgLabels[i].Name, label.Name)
+ }
+ }
+}
diff --git a/routers/web/user/main_test.go b/routers/web/user/main_test.go
new file mode 100644
index 0000000..8b6ae69
--- /dev/null
+++ b/routers/web/user/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/user/notification.go b/routers/web/user/notification.go
new file mode 100644
index 0000000..dfcaf58
--- /dev/null
+++ b/routers/web/user/notification.go
@@ -0,0 +1,485 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ goctx "context"
+ "errors"
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ issues_model "code.gitea.io/gitea/models/issues"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ issue_service "code.gitea.io/gitea/services/issue"
+ pull_service "code.gitea.io/gitea/services/pull"
+)
+
+const (
+ tplNotification base.TplName = "user/notification/notification"
+ tplNotificationDiv base.TplName = "user/notification/notification_div"
+ tplNotificationSubscriptions base.TplName = "user/notification/notification_subscriptions"
+)
+
+// GetNotificationCount is the middleware that sets the notification count in the context
+func GetNotificationCount(ctx *context.Context) {
+ if strings.HasPrefix(ctx.Req.URL.Path, "/api") {
+ return
+ }
+
+ if !ctx.IsSigned {
+ return
+ }
+
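+ // Computed lazily when the template calls the function; -1 signals that the count could not be loaded.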
+ ctx.Data["NotificationUnreadCount"] = func() int64 {
+ count, err := db.Count[activities_model.Notification](ctx, activities_model.FindNotificationOptions{
+ UserID: ctx.Doer.ID,
+ Status: []activities_model.NotificationStatus{activities_model.NotificationStatusUnread},
+ })
+ if err != nil {
+ if err != goctx.Canceled {
+ log.Error("Unable to GetNotificationCount for user:%-v: %v", ctx.Doer, err)
+ }
+ return -1
+ }
+
+ return count
+ }
+}
+
+// Notifications is the notifications page
+func Notifications(ctx *context.Context) {
+ getNotifications(ctx)
+ if ctx.Written() {
+ return
+ }
+ if ctx.FormBool("div-only") {
+ ctx.Data["SequenceNumber"] = ctx.FormString("sequence-number")
+ ctx.HTML(http.StatusOK, tplNotificationDiv)
+ return
+ }
+ ctx.HTML(http.StatusOK, tplNotification)
+}
+
+func getNotifications(ctx *context.Context) {
+ var (
+ keyword = ctx.FormTrim("q")
+ status activities_model.NotificationStatus
+ page = ctx.FormInt("page")
+ perPage = ctx.FormInt("perPage")
+ )
+ if page < 1 {
+ page = 1
+ }
+ if perPage < 1 {
+ perPage = 20
+ }
+
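+ // The "q" parameter doubles as the status filter: "read" lists read notifications, anything else lists unread ones.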
+ switch keyword {
+ case "read":
+ status = activities_model.NotificationStatusRead
+ default:
+ status = activities_model.NotificationStatusUnread
+ }
+
+ total, err := db.Count[activities_model.Notification](ctx, activities_model.FindNotificationOptions{
+ UserID: ctx.Doer.ID,
+ Status: []activities_model.NotificationStatus{status},
+ })
+ if err != nil {
+ ctx.ServerError("ErrGetNotificationCount", err)
+ return
+ }
+
+ // redirect to the last page if the requested page exceeds the total number of pages
+ pager := context.NewPagination(int(total), perPage, page, 5)
+ if pager.Paginater.Current() < page {
+ ctx.Redirect(fmt.Sprintf("%s/notifications?q=%s&page=%d", setting.AppSubURL, url.QueryEscape(ctx.FormString("q")), pager.Paginater.Current()))
+ return
+ }
+
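+ // Pinned notifications are always included alongside the selected status.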
+ statuses := []activities_model.NotificationStatus{status, activities_model.NotificationStatusPinned}
+ nls, err := db.Find[activities_model.Notification](ctx, activities_model.FindNotificationOptions{
+ ListOptions: db.ListOptions{
+ PageSize: perPage,
+ Page: page,
+ },
+ UserID: ctx.Doer.ID,
+ Status: statuses,
+ })
+ if err != nil {
+ ctx.ServerError("db.Find[activities_model.Notification]", err)
+ return
+ }
+
+ notifications := activities_model.NotificationList(nls)
+
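+ // Load the related repositories, issues, and comments; notifications with missing related objects are dropped and counted.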
+ failCount := 0
+
+ repos, failures, err := notifications.LoadRepos(ctx)
+ if err != nil {
+ ctx.ServerError("LoadRepos", err)
+ return
+ }
+ notifications = notifications.Without(failures)
+ if err := repos.LoadAttributes(ctx); err != nil {
+ ctx.ServerError("LoadAttributes", err)
+ return
+ }
+ failCount += len(failures)
+
+ failures, err = notifications.LoadIssues(ctx)
+ if err != nil {
+ ctx.ServerError("LoadIssues", err)
+ return
+ }
+
+ if err = notifications.LoadIssuePullRequests(ctx); err != nil {
+ ctx.ServerError("LoadIssuePullRequests", err)
+ return
+ }
+
+ notifications = notifications.Without(failures)
+ failCount += len(failures)
+
+ failures, err = notifications.LoadComments(ctx)
+ if err != nil {
+ ctx.ServerError("LoadComments", err)
+ return
+ }
+ notifications = notifications.Without(failures)
+ failCount += len(failures)
+
+ if failCount > 0 {
+ ctx.Flash.Error(fmt.Sprintf("ERROR: %d notifications were removed due to missing parts - check the logs", failCount))
+ }
+
+ ctx.Data["Title"] = ctx.Tr("notifications")
+ ctx.Data["Keyword"] = keyword
+ ctx.Data["Status"] = status
+ ctx.Data["Notifications"] = notifications
+
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+}
+
+// NotificationStatusPost is a route for changing the status of a notification
+func NotificationStatusPost(ctx *context.Context) {
+ var (
+ notificationID = ctx.FormInt64("notification_id")
+ statusStr = ctx.FormString("status")
+ status activities_model.NotificationStatus
+ )
+
+ switch statusStr {
+ case "read":
+ status = activities_model.NotificationStatusRead
+ case "unread":
+ status = activities_model.NotificationStatusUnread
+ case "pinned":
+ status = activities_model.NotificationStatusPinned
+ default:
+ ctx.ServerError("InvalidNotificationStatus", errors.New("Invalid notification status"))
+ return
+ }
+
+ if _, err := activities_model.SetNotificationStatus(ctx, notificationID, ctx.Doer, status); err != nil {
+ ctx.ServerError("SetNotificationStatus", err)
+ return
+ }
+
+ if !ctx.FormBool("noredirect") {
+ url := fmt.Sprintf("%s/notifications?page=%s", setting.AppSubURL, url.QueryEscape(ctx.FormString("page")))
+ ctx.Redirect(url, http.StatusSeeOther)
+ }
+
+ getNotifications(ctx)
+ if ctx.Written() {
+ return
+ }
+ ctx.Data["Link"] = setting.AppSubURL + "/notifications"
+ ctx.Data["SequenceNumber"] = ctx.Req.PostFormValue("sequence-number")
+
+ ctx.HTML(http.StatusOK, tplNotificationDiv)
+}
+
+// NotificationPurgePost is a route for 'purging' the list of notifications - marking all unread as read
+func NotificationPurgePost(ctx *context.Context) {
+ err := activities_model.UpdateNotificationStatuses(ctx, ctx.Doer, activities_model.NotificationStatusUnread, activities_model.NotificationStatusRead)
+ if err != nil {
+ ctx.ServerError("UpdateNotificationStatuses", err)
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL+"/notifications", http.StatusSeeOther)
+}
+
+// NotificationSubscriptions returns the list of subscribed issues
+func NotificationSubscriptions(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page < 1 {
+ page = 1
+ }
+
+ sortType := ctx.FormString("sort")
+ ctx.Data["SortType"] = sortType
+
+ state := ctx.FormString("state")
+ if !util.SliceContainsString([]string{"all", "open", "closed"}, state, true) {
+ state = "all"
+ }
+
+ ctx.Data["State"] = state
+ // default state filter is "all"
+ showClosed := optional.None[bool]()
+ switch state {
+ case "closed":
+ showClosed = optional.Some(true)
+ case "open":
+ showClosed = optional.Some(false)
+ }
+
+ issueType := ctx.FormString("issueType")
+ // default issue type is no filter
+ issueTypeBool := optional.None[bool]()
+ switch issueType {
+ case "issues":
+ issueTypeBool = optional.Some(false)
+ case "pulls":
+ issueTypeBool = optional.Some(true)
+ }
+ ctx.Data["IssueType"] = issueType
+
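+ // Parse the optional comma-separated label ID filter ("0" means no label filter).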
+ var labelIDs []int64
+ selectedLabels := ctx.FormString("labels")
+ ctx.Data["Labels"] = selectedLabels
+ if len(selectedLabels) > 0 && selectedLabels != "0" {
+ var err error
+ labelIDs, err = base.StringsToInt64s(strings.Split(selectedLabels, ","))
+ if err != nil {
+ ctx.Flash.Error(ctx.Tr("invalid_data", selectedLabels), true)
+ }
+ }
+
+ count, err := issues_model.CountIssues(ctx, &issues_model.IssuesOptions{
+ SubscriberID: ctx.Doer.ID,
+ IsClosed: showClosed,
+ IsPull: issueTypeBool,
+ LabelIDs: labelIDs,
+ })
+ if err != nil {
+ ctx.ServerError("CountIssues", err)
+ return
+ }
+ issues, err := issues_model.Issues(ctx, &issues_model.IssuesOptions{
+ Paginator: &db.ListOptions{
+ PageSize: setting.UI.IssuePagingNum,
+ Page: page,
+ },
+ SubscriberID: ctx.Doer.ID,
+ SortType: sortType,
+ IsClosed: showClosed,
+ IsPull: issueTypeBool,
+ LabelIDs: labelIDs,
+ })
+ if err != nil {
+ ctx.ServerError("Issues", err)
+ return
+ }
+
+ commitStatuses, lastStatus, err := pull_service.GetIssuesAllCommitStatus(ctx, issues)
+ if err != nil {
+ ctx.ServerError("GetIssuesAllCommitStatus", err)
+ return
+ }
+ if !ctx.Repo.CanRead(unit.TypeActions) {
+ for key := range commitStatuses {
+ git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
+ }
+ }
+ ctx.Data["CommitLastStatus"] = lastStatus
+ ctx.Data["CommitStatuses"] = commitStatuses
+ ctx.Data["Issues"] = issues
+
+ ctx.Data["IssueRefEndNames"], ctx.Data["IssueRefURLs"] = issue_service.GetRefEndNamesAndURLs(issues, "")
+
+ commitStatus, err := pull_service.GetIssuesLastCommitStatus(ctx, issues)
+ if err != nil {
+ ctx.ServerError("GetIssuesLastCommitStatus", err)
+ return
+ }
+ ctx.Data["CommitStatus"] = commitStatus
+
+ approvalCounts, err := issues.GetApprovalCounts(ctx)
+ if err != nil {
+ ctx.ServerError("ApprovalCounts", err)
+ return
+ }
+ ctx.Data["ApprovalCounts"] = func(issueID int64, typ string) int64 {
+ counts, ok := approvalCounts[issueID]
+ if !ok || len(counts) == 0 {
+ return 0
+ }
+ reviewTyp := issues_model.ReviewTypeApprove
+ if typ == "reject" {
+ reviewTyp = issues_model.ReviewTypeReject
+ } else if typ == "waiting" {
+ reviewTyp = issues_model.ReviewTypeRequest
+ }
+ for _, count := range counts {
+ if count.Type == reviewTyp {
+ return count.Count
+ }
+ }
+ return 0
+ }
+
+ ctx.Data["Status"] = 1
+ ctx.Data["Title"] = ctx.Tr("notification.subscriptions")
+
+ // redirect to the last page if the requested page exceeds the total number of pages
+ pager := context.NewPagination(int(count), setting.UI.IssuePagingNum, page, 5)
+ if pager.Paginater.Current() < page {
+ ctx.Redirect(fmt.Sprintf("/notifications/subscriptions?page=%d", pager.Paginater.Current()))
+ return
+ }
+ pager.AddParam(ctx, "sort", "SortType")
+ pager.AddParam(ctx, "state", "State")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplNotificationSubscriptions)
+}
+
+// NotificationWatching returns the list of watched repos
+func NotificationWatching(ctx *context.Context) {
+ page := ctx.FormInt("page")
+ if page < 1 {
+ page = 1
+ }
+
+ keyword := ctx.FormTrim("q")
+ ctx.Data["Keyword"] = keyword
+
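+ // Map the "sort" parameter to a search order; unknown values fall back to "recentupdate".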
+ var orderBy db.SearchOrderBy
+ ctx.Data["SortType"] = ctx.FormString("sort")
+ switch ctx.FormString("sort") {
+ case "newest":
+ orderBy = db.SearchOrderByNewest
+ case "oldest":
+ orderBy = db.SearchOrderByOldest
+ case "recentupdate":
+ orderBy = db.SearchOrderByRecentUpdated
+ case "leastupdate":
+ orderBy = db.SearchOrderByLeastUpdated
+ case "reversealphabetically":
+ orderBy = db.SearchOrderByAlphabeticallyReverse
+ case "alphabetically":
+ orderBy = db.SearchOrderByAlphabetically
+ case "moststars":
+ orderBy = db.SearchOrderByStarsReverse
+ case "feweststars":
+ orderBy = db.SearchOrderByStars
+ case "mostforks":
+ orderBy = db.SearchOrderByForksReverse
+ case "fewestforks":
+ orderBy = db.SearchOrderByForks
+ default:
+ ctx.Data["SortType"] = "recentupdate"
+ orderBy = db.SearchOrderByRecentUpdated
+ }
+
+ archived := ctx.FormOptionalBool("archived")
+ ctx.Data["IsArchived"] = archived
+
+ fork := ctx.FormOptionalBool("fork")
+ ctx.Data["IsFork"] = fork
+
+ mirror := ctx.FormOptionalBool("mirror")
+ ctx.Data["IsMirror"] = mirror
+
+ template := ctx.FormOptionalBool("template")
+ ctx.Data["IsTemplate"] = template
+
+ private := ctx.FormOptionalBool("private")
+ ctx.Data["IsPrivate"] = private
+
+ repos, count, err := repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.User.RepoPagingNum,
+ Page: page,
+ },
+ Actor: ctx.Doer,
+ Keyword: keyword,
+ OrderBy: orderBy,
+ Private: ctx.IsSigned,
+ WatchedByID: ctx.Doer.ID,
+ Collaborate: optional.Some(false),
+ TopicOnly: ctx.FormBool("topic"),
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+ total := int(count)
+ ctx.Data["Total"] = total
+ ctx.Data["Repos"] = repos
+
+ // redirect to the last page if the requested page exceeds the total number of pages
+ pager := context.NewPagination(total, setting.UI.User.RepoPagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ if archived.Has() {
+ pager.AddParamString("archived", fmt.Sprint(archived.Value()))
+ }
+ if fork.Has() {
+ pager.AddParamString("fork", fmt.Sprint(fork.Value()))
+ }
+ if mirror.Has() {
+ pager.AddParamString("mirror", fmt.Sprint(mirror.Value()))
+ }
+ if template.Has() {
+ pager.AddParamString("template", fmt.Sprint(template.Value()))
+ }
+ if private.Has() {
+ pager.AddParamString("private", fmt.Sprint(private.Value()))
+ }
+ ctx.Data["Page"] = pager
+
+ ctx.Data["Status"] = 2
+ ctx.Data["Title"] = ctx.Tr("notification.watching")
+
+ ctx.HTML(http.StatusOK, tplNotificationSubscriptions)
+}
+
+// NewAvailable returns the number of new (unread) notifications
+func NewAvailable(ctx *context.Context) {
+ total, err := db.Count[activities_model.Notification](ctx, activities_model.FindNotificationOptions{
+ UserID: ctx.Doer.ID,
+ Status: []activities_model.NotificationStatus{activities_model.NotificationStatusUnread},
+ })
+ if err != nil {
+ log.Error("db.Count[activities_model.Notification]: %v", err)
+ ctx.JSON(http.StatusOK, structs.NotificationCount{New: 0})
+ return
+ }
+
+ ctx.JSON(http.StatusOK, structs.NotificationCount{New: total})
+}
diff --git a/routers/web/user/package.go b/routers/web/user/package.go
new file mode 100644
index 0000000..d47a36e
--- /dev/null
+++ b/routers/web/user/package.go
@@ -0,0 +1,513 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ org_model "code.gitea.io/gitea/models/organization"
+ packages_model "code.gitea.io/gitea/models/packages"
+ container_model "code.gitea.io/gitea/models/packages/container"
+ "code.gitea.io/gitea/models/perm"
+ access_model "code.gitea.io/gitea/models/perm/access"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ alpine_module "code.gitea.io/gitea/modules/packages/alpine"
+ arch_model "code.gitea.io/gitea/modules/packages/arch"
+ debian_module "code.gitea.io/gitea/modules/packages/debian"
+ rpm_module "code.gitea.io/gitea/modules/packages/rpm"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ packages_helper "code.gitea.io/gitea/routers/api/packages/helper"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ packages_service "code.gitea.io/gitea/services/packages"
+)
+
+const (
+ tplPackagesList base.TplName = "user/overview/packages"
+ tplPackagesView base.TplName = "package/view"
+ tplPackageVersionList base.TplName = "user/overview/package_versions"
+ tplPackagesSettings base.TplName = "package/settings"
+)
+
+// ListPackages displays a list of all packages of the context user
+func ListPackages(ctx *context.Context) {
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ query := ctx.FormTrim("q")
+ packageType := ctx.FormTrim("type")
+
+ pvs, total, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{
+ Paginator: &db.ListOptions{
+ PageSize: setting.UI.PackagesPagingNum,
+ Page: page,
+ },
+ OwnerID: ctx.ContextUser.ID,
+ Type: packages_model.Type(packageType),
+ Name: packages_model.SearchValue{Value: query},
+ IsInternal: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("SearchLatestVersions", err)
+ return
+ }
+
+ pds, err := packages_model.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptors", err)
+ return
+ }
+
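+ // Check once per linked repository whether the viewer has access to it.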
+ repositoryAccessMap := make(map[int64]bool)
+ for _, pd := range pds {
+ if pd.Repository == nil {
+ continue
+ }
+ if _, has := repositoryAccessMap[pd.Repository.ID]; has {
+ continue
+ }
+
+ permission, err := access_model.GetUserRepoPermission(ctx, pd.Repository, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ repositoryAccessMap[pd.Repository.ID] = permission.HasAccess()
+ }
+
+ hasPackages, err := packages_model.HasOwnerPackages(ctx, ctx.ContextUser.ID)
+ if err != nil {
+ ctx.ServerError("HasOwnerPackages", err)
+ return
+ }
+
+ shared_user.RenderUserHeader(ctx)
+
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["Query"] = query
+ ctx.Data["PackageType"] = packageType
+ ctx.Data["AvailableTypes"] = packages_model.TypeList
+ ctx.Data["HasPackages"] = hasPackages
+ ctx.Data["PackageDescriptors"] = pds
+ ctx.Data["Total"] = total
+ ctx.Data["RepositoryAccessMap"] = repositoryAccessMap
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ // TODO: context/org -> HandleOrgAssignment() cannot be used
+ if ctx.ContextUser.IsOrganization() {
+ org := org_model.OrgFromUser(ctx.ContextUser)
+ ctx.Data["Org"] = org
+ ctx.Data["OrgLink"] = ctx.ContextUser.OrganisationLink()
+
+ if ctx.Doer != nil {
+ ctx.Data["IsOrganizationMember"], _ = org_model.IsOrganizationMember(ctx, org.ID, ctx.Doer.ID)
+ ctx.Data["IsOrganizationOwner"], _ = org_model.IsOrganizationOwner(ctx, org.ID, ctx.Doer.ID)
+ } else {
+ ctx.Data["IsOrganizationMember"] = false
+ ctx.Data["IsOrganizationOwner"] = false
+ }
+ }
+
+ pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5)
+ pager.AddParam(ctx, "q", "Query")
+ pager.AddParam(ctx, "type", "PackageType")
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplPackagesList)
+}
+
+// RedirectToLastVersion redirects to the latest package version
+func RedirectToLastVersion(ctx *context.Context) {
+ p, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.Type(ctx.Params("type")), ctx.Params("name"))
+ if err != nil {
+ if err == packages_model.ErrPackageNotExist {
+ ctx.NotFound("GetPackageByName", err)
+ } else {
+ ctx.ServerError("GetPackageByName", err)
+ }
+ return
+ }
+
+ pvs, _, err := packages_model.SearchLatestVersions(ctx, &packages_model.PackageSearchOptions{
+ PackageID: p.ID,
+ IsInternal: optional.Some(false),
+ })
+ if err != nil {
+ ctx.ServerError("GetPackageByName", err)
+ return
+ }
+ if len(pvs) == 0 {
+ ctx.NotFound("", err)
+ return
+ }
+
+ pd, err := packages_model.GetPackageDescriptor(ctx, pvs[0])
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptor", err)
+ return
+ }
+
+ ctx.Redirect(pd.VersionWebLink())
+}
+
+// ViewPackageVersion displays a single package version
+func ViewPackageVersion(ctx *context.Context) {
+ pd := ctx.Package.Descriptor
+
+ shared_user.RenderUserHeader(ctx)
+
+ ctx.Data["Title"] = pd.Package.Name
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["PackageDescriptor"] = pd
+
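+ // Collect package-type-specific metadata (registry host, branches, distributions, architectures, ...) from the file properties.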
+ switch pd.Package.Type {
+ case packages_model.TypeContainer:
+ ctx.Data["RegistryHost"] = setting.Packages.RegistryHost
+ case packages_model.TypeAlpine:
+ branches := make(container.Set[string])
+ repositories := make(container.Set[string])
+ architectures := make(container.Set[string])
+
+ for _, f := range pd.Files {
+ for _, pp := range f.Properties {
+ switch pp.Name {
+ case alpine_module.PropertyBranch:
+ branches.Add(pp.Value)
+ case alpine_module.PropertyRepository:
+ repositories.Add(pp.Value)
+ case alpine_module.PropertyArchitecture:
+ architectures.Add(pp.Value)
+ }
+ }
+ }
+
+ ctx.Data["Branches"] = util.Sorted(branches.Values())
+ ctx.Data["Repositories"] = util.Sorted(repositories.Values())
+ ctx.Data["Architectures"] = util.Sorted(architectures.Values())
+ case packages_model.TypeArch:
+ ctx.Data["RegistryHost"] = setting.Packages.RegistryHost
+ ctx.Data["SignMail"] = fmt.Sprintf("%s@noreply.%s", ctx.Package.Owner.Name, setting.Packages.RegistryHost)
+ groups := make(container.Set[string])
+ for _, f := range pd.Files {
+ for _, pp := range f.Properties {
+ if pp.Name == arch_model.PropertyDistribution {
+ groups.Add(pp.Value)
+ }
+ }
+ }
+ ctx.Data["Groups"] = util.Sorted(groups.Values())
+ case packages_model.TypeDebian:
+ distributions := make(container.Set[string])
+ components := make(container.Set[string])
+ architectures := make(container.Set[string])
+
+ for _, f := range pd.Files {
+ for _, pp := range f.Properties {
+ switch pp.Name {
+ case debian_module.PropertyDistribution:
+ distributions.Add(pp.Value)
+ case debian_module.PropertyComponent:
+ components.Add(pp.Value)
+ case debian_module.PropertyArchitecture:
+ architectures.Add(pp.Value)
+ }
+ }
+ }
+
+ ctx.Data["Distributions"] = util.Sorted(distributions.Values())
+ ctx.Data["Components"] = util.Sorted(components.Values())
+ ctx.Data["Architectures"] = util.Sorted(architectures.Values())
+ case packages_model.TypeRpm:
+ groups := make(container.Set[string])
+ architectures := make(container.Set[string])
+
+ for _, f := range pd.Files {
+ for _, pp := range f.Properties {
+ switch pp.Name {
+ case rpm_module.PropertyGroup:
+ groups.Add(pp.Value)
+ case rpm_module.PropertyArchitecture:
+ architectures.Add(pp.Value)
+ }
+ }
+ }
+
+ ctx.Data["Groups"] = util.Sorted(groups.Values())
+ ctx.Data["Architectures"] = util.Sorted(architectures.Values())
+ }
+
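+ // Load up to five versions of the package; for container packages only tagged images are listed.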
+ var (
+ total int64
+ pvs []*packages_model.PackageVersion
+ err error
+ )
+ switch pd.Package.Type {
+ case packages_model.TypeContainer:
+ pvs, total, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{
+ Paginator: db.NewAbsoluteListOptions(0, 5),
+ PackageID: pd.Package.ID,
+ IsTagged: true,
+ })
+ default:
+ pvs, total, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ Paginator: db.NewAbsoluteListOptions(0, 5),
+ PackageID: pd.Package.ID,
+ IsInternal: optional.Some(false),
+ })
+ }
+ if err != nil {
+ ctx.ServerError("", err)
+ return
+ }
+
+ ctx.Data["LatestVersions"] = pvs
+ ctx.Data["TotalVersionCount"] = total
+
+ ctx.Data["CanWritePackages"] = ctx.Package.AccessMode >= perm.AccessModeWrite || ctx.IsUserSiteAdmin()
+
+ hasRepositoryAccess := false
+ if pd.Repository != nil {
+ permission, err := access_model.GetUserRepoPermission(ctx, pd.Repository, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserRepoPermission", err)
+ return
+ }
+ hasRepositoryAccess = permission.HasAccess()
+ }
+ ctx.Data["HasRepositoryAccess"] = hasRepositoryAccess
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplPackagesView)
+}
+
+// ListPackageVersions lists all versions of a package
+func ListPackageVersions(ctx *context.Context) {
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ p, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.Type(ctx.Params("type")), ctx.Params("name"))
+ if err != nil {
+ if err == packages_model.ErrPackageNotExist {
+ ctx.NotFound("GetPackageByName", err)
+ } else {
+ ctx.ServerError("GetPackageByName", err)
+ }
+ return
+ }
+
+ page := ctx.FormInt("page")
+ if page <= 1 {
+ page = 1
+ }
+ pagination := &db.ListOptions{
+ PageSize: setting.UI.PackagesPagingNum,
+ Page: page,
+ }
+
+ query := ctx.FormTrim("q")
+ sort := ctx.FormTrim("sort")
+
+ shared_user.RenderUserHeader(ctx)
+
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["PackageDescriptor"] = &packages_model.PackageDescriptor{
+ Package: p,
+ Owner: ctx.Package.Owner,
+ }
+ ctx.Data["Query"] = query
+ ctx.Data["Sort"] = sort
+
+ pagerParams := map[string]string{
+ "q": query,
+ "sort": sort,
+ }
+
+ var (
+ total int64
+ pvs []*packages_model.PackageVersion
+ )
+ switch p.Type {
+ case packages_model.TypeContainer:
+ tagged := ctx.FormTrim("tagged")
+
+ pagerParams["tagged"] = tagged
+ ctx.Data["Tagged"] = tagged
+
+ pvs, total, err = container_model.SearchImageTags(ctx, &container_model.ImageTagsSearchOptions{
+ Paginator: pagination,
+ PackageID: p.ID,
+ Query: query,
+ IsTagged: tagged == "" || tagged == "tagged",
+ Sort: sort,
+ })
+ if err != nil {
+ ctx.ServerError("SearchImageTags", err)
+ return
+ }
+ default:
+ pvs, total, err = packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{
+ Paginator: pagination,
+ PackageID: p.ID,
+ Version: packages_model.SearchValue{
+ ExactMatch: false,
+ Value: query,
+ },
+ IsInternal: optional.Some(false),
+ Sort: sort,
+ })
+ if err != nil {
+ ctx.ServerError("SearchVersions", err)
+ return
+ }
+ }
+
+ ctx.Data["PackageDescriptors"], err = packages_model.GetPackageDescriptors(ctx, pvs)
+ if err != nil {
+ ctx.ServerError("GetPackageDescriptors", err)
+ return
+ }
+
+ ctx.Data["Total"] = total
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ pager := context.NewPagination(int(total), setting.UI.PackagesPagingNum, page, 5)
+ for k, v := range pagerParams {
+ pager.AddParamString(k, v)
+ }
+ ctx.Data["Page"] = pager
+
+ ctx.HTML(http.StatusOK, tplPackageVersionList)
+}
+
+// PackageSettings displays the package settings page
+func PackageSettings(ctx *context.Context) {
+ pd := ctx.Package.Descriptor
+
+ shared_user.RenderUserHeader(ctx)
+
+ ctx.Data["Title"] = pd.Package.Name
+ ctx.Data["IsPackagesPage"] = true
+ ctx.Data["PackageDescriptor"] = pd
+
+ repos, _, _ := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{
+ Actor: pd.Owner,
+ Private: true,
+ })
+ ctx.Data["Repos"] = repos
+ ctx.Data["CanWritePackages"] = ctx.Package.AccessMode >= perm.AccessModeWrite || ctx.IsUserSiteAdmin()
+
+ err := shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplPackagesSettings)
+}
+
+// PackageSettingsPost updates the package settings
+func PackageSettingsPost(ctx *context.Context) {
+ pd := ctx.Package.Descriptor
+
+ form := web.GetForm(ctx).(*forms.PackageSettingForm)
+ switch form.Action {
+ case "link":
+ success := func() bool {
+ repoID := int64(0)
+ if form.RepoID != 0 {
+ repo, err := repo_model.GetRepositoryByID(ctx, form.RepoID)
+ if err != nil {
+ log.Error("Error getting repository: %v", err)
+ return false
+ }
+
+ if repo.OwnerID != pd.Owner.ID {
+ return false
+ }
+
+ repoID = repo.ID
+ }
+
+ if err := packages_model.SetRepositoryLink(ctx, pd.Package.ID, repoID); err != nil {
+ log.Error("Error updating package: %v", err)
+ return false
+ }
+
+ return true
+ }()
+
+ if success {
+ ctx.Flash.Success(ctx.Tr("packages.settings.link.success"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("packages.settings.link.error"))
+ }
+
+ ctx.Redirect(ctx.Link)
+ return
+ case "delete":
+ err := packages_service.RemovePackageVersion(ctx, ctx.Doer, ctx.Package.Descriptor.Version)
+ if err != nil {
+ log.Error("Error deleting package: %v", err)
+ ctx.Flash.Error(ctx.Tr("packages.settings.delete.error"))
+ } else {
+ ctx.Flash.Success(ctx.Tr("packages.settings.delete.success"))
+ }
+
+ redirectURL := ctx.Package.Owner.HomeLink() + "/-/packages"
+ // redirect to the package if there are still versions available
+ if has, _ := packages_model.ExistVersion(ctx, &packages_model.PackageSearchOptions{PackageID: ctx.Package.Descriptor.Package.ID, IsInternal: optional.Some(false)}); has {
+ redirectURL = ctx.Package.Descriptor.PackageWebLink()
+ }
+
+ ctx.Redirect(redirectURL)
+ return
+ }
+}
+
+// DownloadPackageFile serves the content of a package file
+func DownloadPackageFile(ctx *context.Context) {
+ pf, err := packages_model.GetFileForVersionByID(ctx, ctx.Package.Descriptor.Version.ID, ctx.ParamsInt64(":fileid"))
+ if err != nil {
+ if err == packages_model.ErrPackageFileNotExist {
+ ctx.NotFound("", err)
+ } else {
+ ctx.ServerError("GetFileForVersionByID", err)
+ }
+ return
+ }
+
+ s, u, _, err := packages_service.GetPackageFileStream(ctx, pf)
+ if err != nil {
+ ctx.ServerError("GetPackageFileStream", err)
+ return
+ }
+
+ packages_helper.ServePackageFile(ctx, s, u, pf)
+}
diff --git a/routers/web/user/profile.go b/routers/web/user/profile.go
new file mode 100644
index 0000000..9cb392d
--- /dev/null
+++ b/routers/web/user/profile.go
@@ -0,0 +1,385 @@
+// Copyright 2015 The Gogs Authors. All rights reserved.
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "errors"
+ "fmt"
+ "net/http"
+ "path"
+ "strings"
+
+ activities_model "code.gitea.io/gitea/models/activities"
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/markup"
+ "code.gitea.io/gitea/modules/markup/markdown"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/routers/web/org"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+const (
+ tplProfileBigAvatar base.TplName = "shared/user/profile_big_avatar"
+ tplFollowUnfollow base.TplName = "org/follow_unfollow"
+)
+
+// OwnerProfile renders the profile page for a user or an organization (i.e. the repo owner)
+func OwnerProfile(ctx *context.Context) {
+ if strings.Contains(ctx.Req.Header.Get("Accept"), "application/rss+xml") {
+ feed.ShowUserFeedRSS(ctx)
+ return
+ }
+ if strings.Contains(ctx.Req.Header.Get("Accept"), "application/atom+xml") {
+ feed.ShowUserFeedAtom(ctx)
+ return
+ }
+
+ if ctx.ContextUser.IsOrganization() {
+ org.Home(ctx)
+ } else {
+ userProfile(ctx)
+ }
+}
+
+func userProfile(ctx *context.Context) {
+ // check view permissions
+ if !user_model.IsUserVisibleToViewer(ctx, ctx.ContextUser, ctx.Doer) {
+ ctx.NotFound("User not visible", nil)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.ContextUser.DisplayName()
+ ctx.Data["PageIsUserProfile"] = true
+
+ // prepare heatmap data
+ if setting.Service.EnableUserHeatmap {
+ data, err := activities_model.GetUserHeatmapDataByUser(ctx, ctx.ContextUser, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserHeatmapDataByUser", err)
+ return
+ }
+ ctx.Data["HeatmapData"] = data
+ ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
+ }
+
+ profileDbRepo, profileGitRepo, profileReadmeBlob, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
+ defer profileClose()
+
+ showPrivate := ctx.IsSigned && (ctx.Doer.IsAdmin || ctx.Doer.ID == ctx.ContextUser.ID)
+ prepareUserProfileTabData(ctx, showPrivate, profileDbRepo, profileGitRepo, profileReadmeBlob)
+ // call PrepareContextForProfileBigAvatar later to avoid re-querying the NumFollowers & NumFollowing
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ ctx.HTML(http.StatusOK, tplProfile)
+}
+
+func prepareUserProfileTabData(ctx *context.Context, showPrivate bool, profileDbRepo *repo_model.Repository, profileGitRepo *git.Repository, profileReadme *git.Blob) {
+ // If there is a profile README, default to the "overview" tab; otherwise default to "repositories".
+ // Without a profile README, an explicit "overview" tab is treated as the repositories tab.
+ tab := ctx.FormString("tab")
+ if tab == "" || tab == "overview" {
+ if profileReadme != nil {
+ tab = "overview"
+ } else {
+ tab = "repositories"
+ }
+ }
+ ctx.Data["TabName"] = tab
+ ctx.Data["HasProfileReadme"] = profileReadme != nil
+
+ page := ctx.FormInt("page")
+ if page <= 0 {
+ page = 1
+ }
+
+ pagingNum := setting.UI.User.RepoPagingNum
+ topicOnly := ctx.FormBool("topic")
+ var (
+ repos []*repo_model.Repository
+ count int64
+ total int
+ orderBy db.SearchOrderBy
+ )
+
+ sortOrder := ctx.FormString("sort")
+ if _, ok := repo_model.OrderByFlatMap[sortOrder]; !ok {
+ sortOrder = setting.UI.ExploreDefaultSort // TODO: add new default sort order for user home?
+ }
+ ctx.Data["SortType"] = sortOrder
+ orderBy = repo_model.OrderByFlatMap[sortOrder]
+
+ keyword := ctx.FormTrim("q")
+ ctx.Data["Keyword"] = keyword
+
+ language := ctx.FormTrim("language")
+ ctx.Data["Language"] = language
+
+ followers, numFollowers, err := user_model.GetUserFollowers(ctx, ctx.ContextUser, ctx.Doer, db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ })
+ if err != nil {
+ ctx.ServerError("GetUserFollowers", err)
+ return
+ }
+ ctx.Data["NumFollowers"] = numFollowers
+ following, numFollowing, err := user_model.GetUserFollowing(ctx, ctx.ContextUser, ctx.Doer, db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ })
+ if err != nil {
+ ctx.ServerError("GetUserFollowing", err)
+ return
+ }
+ ctx.Data["NumFollowing"] = numFollowing
+
+ archived := ctx.FormOptionalBool("archived")
+ ctx.Data["IsArchived"] = archived
+
+ fork := ctx.FormOptionalBool("fork")
+ ctx.Data["IsFork"] = fork
+
+ mirror := ctx.FormOptionalBool("mirror")
+ ctx.Data["IsMirror"] = mirror
+
+ template := ctx.FormOptionalBool("template")
+ ctx.Data["IsTemplate"] = template
+
+ private := ctx.FormOptionalBool("private")
+ ctx.Data["IsPrivate"] = private
+
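+ // Populate tab-specific data: follower/following cards, the activity feed, starred or watched repositories, the profile README, or the repository list.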
+ switch tab {
+ case "followers":
+ ctx.Data["Cards"] = followers
+ total = int(numFollowers)
+ ctx.Data["CardsTitle"] = ctx.TrN(total, "user.followers.title.one", "user.followers.title.few")
+ case "following":
+ ctx.Data["Cards"] = following
+ total = int(numFollowing)
+ ctx.Data["CardsTitle"] = ctx.TrN(total, "user.following.title.one", "user.following.title.few")
+ case "activity":
+ date := ctx.FormString("date")
+ pagingNum = setting.UI.FeedPagingNum
+ items, count, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
+ RequestedUser: ctx.ContextUser,
+ Actor: ctx.Doer,
+ IncludePrivate: showPrivate,
+ OnlyPerformedBy: true,
+ IncludeDeleted: false,
+ Date: date,
+ ListOptions: db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ },
+ })
+ if err != nil {
+ ctx.ServerError("GetFeeds", err)
+ return
+ }
+ ctx.Data["Feeds"] = items
+ ctx.Data["Date"] = date
+
+ total = int(count)
+ case "stars":
+ ctx.Data["PageIsProfileStarList"] = true
+ repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ },
+ Actor: ctx.Doer,
+ Keyword: keyword,
+ OrderBy: orderBy,
+ Private: ctx.IsSigned,
+ StarredByID: ctx.ContextUser.ID,
+ Collaborate: optional.Some(false),
+ TopicOnly: topicOnly,
+ Language: language,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+
+ total = int(count)
+ case "watching":
+ repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ },
+ Actor: ctx.Doer,
+ Keyword: keyword,
+ OrderBy: orderBy,
+ Private: ctx.IsSigned,
+ WatchedByID: ctx.ContextUser.ID,
+ Collaborate: optional.Some(false),
+ TopicOnly: topicOnly,
+ Language: language,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+
+ total = int(count)
+ case "overview":
+ if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("failed to GetBlobContent: %v", err)
+ } else {
+ if profileContent, err := markdown.RenderString(&markup.RenderContext{
+ Ctx: ctx,
+ GitRepo: profileGitRepo,
+ Links: markup.Links{
+ // Give the repo link to the markdown renderer so media elements get their full links.
+ // The media link usually looks like /[user]/[repoName]/media/branch/[branchName],
+ // e.g. /Tom/.profile/media/branch/main
+ // The branch shown on the profile page is the default branch; this needs to stay in sync with the docs, see:
+ // https://docs.gitea.com/usage/profile-readme
+ Base: profileDbRepo.Link(),
+ BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
+ },
+ Metas: map[string]string{"mode": "document"},
+ }, bytes); err != nil {
+ log.Error("failed to RenderString: %v", err)
+ } else {
+ ctx.Data["ProfileReadme"] = profileContent
+ }
+ }
+ default: // default to "repositories"
+ repos, count, err = repo_model.SearchRepository(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: pagingNum,
+ Page: page,
+ },
+ Actor: ctx.Doer,
+ Keyword: keyword,
+ OwnerID: ctx.ContextUser.ID,
+ OrderBy: orderBy,
+ Private: ctx.IsSigned,
+ Collaborate: optional.Some(false),
+ TopicOnly: topicOnly,
+ Language: language,
+ IncludeDescription: setting.UI.SearchRepoDescription,
+ Archived: archived,
+ Fork: fork,
+ Mirror: mirror,
+ Template: template,
+ IsPrivate: private,
+ })
+ if err != nil {
+ ctx.ServerError("SearchRepository", err)
+ return
+ }
+
+ total = int(count)
+ }
+ ctx.Data["Repos"] = repos
+ ctx.Data["Total"] = total
+
+ err = shared_user.LoadHeaderCount(ctx)
+ if err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+
+ pager := context.NewPagination(total, pagingNum, page, 5)
+ pager.SetDefaultParams(ctx)
+ pager.AddParam(ctx, "tab", "TabName")
+ if tab != "followers" && tab != "following" && tab != "activity" && tab != "projects" {
+ pager.AddParam(ctx, "language", "Language")
+ }
+ if tab == "activity" {
+ pager.AddParam(ctx, "date", "Date")
+ }
+ if archived.Has() {
+ pager.AddParamString("archived", fmt.Sprint(archived.Value()))
+ }
+ if fork.Has() {
+ pager.AddParamString("fork", fmt.Sprint(fork.Value()))
+ }
+ if mirror.Has() {
+ pager.AddParamString("mirror", fmt.Sprint(mirror.Value()))
+ }
+ if template.Has() {
+ pager.AddParamString("template", fmt.Sprint(template.Value()))
+ }
+ if private.Has() {
+ pager.AddParamString("private", fmt.Sprint(private.Value()))
+ }
+ ctx.Data["Page"] = pager
+}
+
+// Action responds to follow/unfollow and block/unblock requests for the context user
+func Action(ctx *context.Context) {
+ var err error
+ action := ctx.FormString("action")
+
+ if ctx.ContextUser.IsOrganization() && (action == "block" || action == "unblock") {
+ log.Error("Cannot perform this action on an organization %q", ctx.FormString("action"))
+ ctx.JSONError(fmt.Sprintf("Action %q failed", ctx.FormString("action")))
+ return
+ }
+
+ switch action {
+ case "follow":
+ err = user_model.FollowUser(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ case "unfollow":
+ err = user_model.UnfollowUser(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ case "block":
+ err = user_service.BlockUser(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ case "unblock":
+ err = user_model.UnblockUser(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ }
+
+ if err != nil {
+ if !errors.Is(err, user_model.ErrBlockedByUser) {
+ log.Error("Failed to apply action %q: %v", ctx.FormString("action"), err)
+ ctx.Error(http.StatusBadRequest, fmt.Sprintf("Action %q failed", ctx.FormString("action")))
+ return
+ }
+
+ if ctx.ContextUser.IsOrganization() {
+ ctx.Flash.Error(ctx.Tr("org.follow_blocked_user"), true)
+ } else {
+ ctx.Flash.Error(ctx.Tr("user.follow_blocked_user"), true)
+ }
+ }
+
+ if ctx.ContextUser.IsIndividual() {
+ shared_user.PrepareContextForProfileBigAvatar(ctx)
+ ctx.Data["IsHTMX"] = true
+ ctx.HTML(http.StatusOK, tplProfileBigAvatar)
+ return
+ } else if ctx.ContextUser.IsOrganization() {
+ ctx.Data["Org"] = ctx.ContextUser
+ ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
+ ctx.HTML(http.StatusOK, tplFollowUnfollow)
+ return
+ }
+ log.Error("Failed to apply action %q: unsupported context user type: %s", ctx.FormString("action"), ctx.ContextUser.Type)
+ ctx.Error(http.StatusBadRequest, fmt.Sprintf("Action %q failed", ctx.FormString("action")))
+}
diff --git a/routers/web/user/search.go b/routers/web/user/search.go
new file mode 100644
index 0000000..fb7729b
--- /dev/null
+++ b/routers/web/user/search.go
@@ -0,0 +1,44 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+)
+
+// Search searches for users
+func Search(ctx *context.Context) {
+ listOptions := db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ }
+
+ users, maxResults, err := user_model.SearchUsers(ctx, &user_model.SearchUserOptions{
+ Actor: ctx.Doer,
+ Keyword: ctx.FormTrim("q"),
+ UID: ctx.FormInt64("uid"),
+ Type: user_model.UserTypeIndividual,
+ IsActive: ctx.FormOptionalBool("active"),
+ ListOptions: listOptions,
+ })
+ if err != nil {
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "ok": false,
+ "error": err.Error(),
+ })
+ return
+ }
+
+ ctx.SetTotalCountHeader(maxResults)
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "ok": true,
+ "data": convert.ToUsers(ctx, ctx.Doer, users),
+ })
+}
diff --git a/routers/web/user/setting/account.go b/routers/web/user/setting/account.go
new file mode 100644
index 0000000..3a2527c
--- /dev/null
+++ b/routers/web/user/setting/account.go
@@ -0,0 +1,344 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "net/http"
+ "time"
+
+ "code.gitea.io/gitea/models"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/timeutil"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/auth/source/db"
+ "code.gitea.io/gitea/services/auth/source/smtp"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+ "code.gitea.io/gitea/services/user"
+)
+
+const (
+ tplSettingsAccount base.TplName = "user/settings/account"
+)
+
+// Account renders the page for changing the user's password and email and for deleting the account
+func Account(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.account")
+ ctx.Data["PageIsSettingsAccount"] = true
+ ctx.Data["Email"] = ctx.Doer.Email
+ ctx.Data["EnableNotifyMail"] = setting.Service.EnableNotifyMail
+
+ loadAccountData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsAccount)
+}
+
+// AccountPost handles the request to change the user's password
+func AccountPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.ChangePasswordForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAccount"] = true
+
+ if ctx.HasError() {
+ loadAccountData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsAccount)
+ return
+ }
+
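+ // Require the current password (when one is set) and a matching confirmation before updating.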
+ if ctx.Doer.IsPasswordSet() && !ctx.Doer.ValidatePassword(form.OldPassword) {
+ ctx.Flash.Error(ctx.Tr("settings.password_incorrect"))
+ } else if form.Password != form.Retype {
+ ctx.Flash.Error(ctx.Tr("form.password_not_match"))
+ } else {
+ opts := &user.UpdateAuthOptions{
+ Password: optional.Some(form.Password),
+ MustChangePassword: optional.Some(false),
+ }
+ if err := user.UpdateAuth(ctx, ctx.Doer, opts); err != nil {
+ switch {
+ case errors.Is(err, password.ErrMinLength):
+ ctx.Flash.Error(ctx.Tr("auth.password_too_short", setting.MinPasswordLength))
+ case errors.Is(err, password.ErrComplexity):
+ ctx.Flash.Error(password.BuildComplexityError(ctx.Locale))
+ case errors.Is(err, password.ErrIsPwned):
+ ctx.Flash.Error(ctx.Tr("auth.password_pwned", "https://haveibeenpwned.com/Passwords"))
+ case password.IsErrIsPwnedRequest(err):
+ ctx.Flash.Error(ctx.Tr("auth.password_pwned_err"))
+ default:
+ ctx.ServerError("UpdateAuth", err)
+ return
+ }
+ } else {
+ // Re-generate LTA cookie.
+ if len(ctx.GetSiteCookie(setting.CookieRememberName)) != 0 {
+ if err := ctx.SetLTACookie(ctx.Doer); err != nil {
+ ctx.ServerError("SetLTACookie", err)
+ return
+ }
+ }
+
+ log.Trace("User password updated: %s", ctx.Doer.Name)
+ ctx.Flash.Success(ctx.Tr("settings.change_password_success"))
+ }
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+}
+
+// EmailPost handles requests to change the user's email settings
+func EmailPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AddEmailForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAccount"] = true
+
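+ // The "_method" form field selects the action: PRIMARY, SENDACTIVATION, NOTIFICATION, or (by default) adding a new address.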
+ // Make the email address primary.
+ if ctx.FormString("_method") == "PRIMARY" {
+ id := ctx.FormInt64("id")
+ email, err := user_model.GetEmailAddressByID(ctx, ctx.Doer.ID, id)
+ if err != nil {
+ log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.Doer.ID, id, err)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+
+ if err := user.MakeEmailAddressPrimary(ctx, ctx.Doer, email, true); err != nil {
+ ctx.ServerError("MakeEmailPrimary", err)
+ return
+ }
+
+ log.Trace("Email made primary: %s", ctx.Doer.Name)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ // Send activation Email
+ if ctx.FormString("_method") == "SENDACTIVATION" {
+ var address string
+ if ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName) {
+ log.Error("Send activation: activation still pending")
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+
+ id := ctx.FormInt64("id")
+ email, err := user_model.GetEmailAddressByID(ctx, ctx.Doer.ID, id)
+ if err != nil {
+ log.Error("GetEmailAddressByID(%d,%d) error: %v", ctx.Doer.ID, id, err)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ if email == nil {
+ log.Warn("Send activation failed: EmailAddress[%d] not found for user: %-v", id, ctx.Doer)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ if email.IsActivated {
+ log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.Doer)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ if email.IsPrimary {
+ if ctx.Doer.IsActive && !setting.Service.RegisterEmailConfirm {
+ log.Debug("Send activation failed: email %s is already activated for user: %-v", email.Email, ctx.Doer)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ // Only reached when the primary email is not yet activated (an inconsistent state).
+ mailer.SendActivateAccountMail(ctx.Locale, ctx.Doer)
+ } else {
+ mailer.SendActivateEmailMail(ctx.Doer, email.Email)
+ }
+ address = email.Email
+
+ if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+ log.Error("Set cache(MailResendLimit) fail: %v", err)
+ }
+
+ ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", address, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+ // Set the email notification preference.
+ if ctx.FormString("_method") == "NOTIFICATION" {
+ preference := ctx.FormString("preference")
+ if !(preference == user_model.EmailNotificationsEnabled ||
+ preference == user_model.EmailNotificationsOnMention ||
+ preference == user_model.EmailNotificationsDisabled ||
+ preference == user_model.EmailNotificationsAndYourOwn) {
+ log.Error("Email notifications preference change returned unrecognized option %s: %s", preference, ctx.Doer.Name)
+ ctx.ServerError("SetEmailPreference", errors.New("option unrecognized"))
+ return
+ }
+ opts := &user.UpdateOptions{
+ EmailNotificationsPreference: optional.Some(preference),
+ }
+ if err := user.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ log.Error("Set Email Notifications failed: %v", err)
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+ log.Trace("Email notifications preference made %s: %s", preference, ctx.Doer.Name)
+ ctx.Flash.Success(ctx.Tr("settings.email_preference_set_success"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+
+ if ctx.HasError() {
+ loadAccountData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsAccount)
+ return
+ }
+
+ if err := user.AddEmailAddresses(ctx, ctx.Doer, []string{form.Email}); err != nil {
+ if user_model.IsErrEmailAlreadyUsed(err) {
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplSettingsAccount, &form)
+ } else if user_model.IsErrEmailCharIsNotSupported(err) || user_model.IsErrEmailInvalid(err) {
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplSettingsAccount, &form)
+ } else {
+ ctx.ServerError("AddEmailAddresses", err)
+ }
+ return
+ }
+
+ // Send confirmation email
+ if setting.Service.RegisterEmailConfirm {
+ mailer.SendActivateEmailMail(ctx.Doer, form.Email)
+ if err := ctx.Cache.Put("MailResendLimit_"+ctx.Doer.LowerName, ctx.Doer.LowerName, 180); err != nil {
+ log.Error("Set cache(MailResendLimit) fail: %v", err)
+ }
+
+ ctx.Flash.Info(ctx.Tr("settings.add_email_confirmation_sent", form.Email, timeutil.MinutesToFriendly(setting.Service.ActiveCodeLives, ctx.Locale)))
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.add_email_success"))
+ }
+
+ log.Trace("Email address added: %s", form.Email)
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+}
+
+// DeleteEmail responds to a request to delete one of the user's email addresses
+func DeleteEmail(ctx *context.Context) {
+ email, err := user_model.GetEmailAddressByID(ctx, ctx.Doer.ID, ctx.FormInt64("id"))
+ if err != nil || email == nil {
+ ctx.ServerError("GetEmailAddressByID", err)
+ return
+ }
+
+ if err := user.DeleteEmailAddresses(ctx, ctx.Doer, []string{email.Email}); err != nil {
+ ctx.ServerError("DeleteEmailAddresses", err)
+ return
+ }
+ log.Trace("Email address deleted: %s", ctx.Doer.Name)
+
+ ctx.Flash.Success(ctx.Tr("settings.email_deletion_success"))
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/account")
+}
+
+// DeleteAccount handles the account deletion page and a user's request to delete their own account
+func DeleteAccount(ctx *context.Context) {
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureDeletion) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAccount"] = true
+
+ if _, _, err := auth.UserSignIn(ctx, ctx.Doer.Name, ctx.FormString("password")); err != nil {
+ switch {
+ case user_model.IsErrUserNotExist(err):
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.user_not_exist"), tplSettingsAccount, nil)
+ case errors.Is(err, smtp.ErrUnsupportedLoginType):
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.unsupported_login_type"), tplSettingsAccount, nil)
+ case errors.As(err, &db.ErrUserPasswordNotSet{}):
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.unset_password"), tplSettingsAccount, nil)
+ case errors.As(err, &db.ErrUserPasswordInvalid{}):
+ loadAccountData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.enterred_invalid_password"), tplSettingsAccount, nil)
+ default:
+ ctx.ServerError("UserSignIn", err)
+ }
+ return
+ }
+
+ // Admins must not delete their own account.
+ if ctx.Doer.IsAdmin {
+ ctx.Flash.Error(ctx.Tr("form.admin_cannot_delete_self"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ return
+ }
+
+ if err := user.DeleteUser(ctx, ctx.Doer, false); err != nil {
+ switch {
+ case models.IsErrUserOwnRepos(err):
+ ctx.Flash.Error(ctx.Tr("form.still_own_repo"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ case models.IsErrUserHasOrgs(err):
+ ctx.Flash.Error(ctx.Tr("form.still_has_org"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ case models.IsErrUserOwnPackages(err):
+ ctx.Flash.Error(ctx.Tr("form.still_own_packages"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ case models.IsErrDeleteLastAdminUser(err):
+ ctx.Flash.Error(ctx.Tr("auth.last_admin"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ default:
+ ctx.ServerError("DeleteUser", err)
+ }
+ } else {
+ log.Trace("Account deleted: %s", ctx.Doer.Name)
+ ctx.Redirect(setting.AppSubURL + "/")
+ }
+}
+
+func loadAccountData(ctx *context.Context) {
+ emlist, err := user_model.GetEmailAddresses(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetEmailAddresses", err)
+ return
+ }
+ type UserEmail struct {
+ user_model.EmailAddress
+ CanBePrimary bool
+ }
+ pendingActivation := ctx.Cache.IsExist("MailResendLimit_" + ctx.Doer.LowerName)
+ emails := make([]*UserEmail, len(emlist))
+ for i, em := range emlist {
+ var email UserEmail
+ email.EmailAddress = *em
+ email.CanBePrimary = em.IsActivated
+ emails[i] = &email
+ }
+ ctx.Data["Emails"] = emails
+ ctx.Data["EmailNotificationsPreference"] = ctx.Doer.EmailNotificationsPreference
+ ctx.Data["ActivationsPending"] = pendingActivation
+ ctx.Data["CanAddEmails"] = !pendingActivation || !setting.Service.RegisterEmailConfirm
+ ctx.Data["UserDisabledFeatures"] = user_model.DisabledFeaturesWithLoginType(ctx.Doer)
+
+ if setting.Service.UserDeleteWithCommentsMaxTime != 0 {
+ ctx.Data["UserDeleteWithCommentsMaxTime"] = setting.Service.UserDeleteWithCommentsMaxTime.String()
+ ctx.Data["UserDeleteWithComments"] = ctx.Doer.CreatedUnix.AsTime().Add(setting.Service.UserDeleteWithCommentsMaxTime).After(time.Now())
+ }
+}
diff --git a/routers/web/user/setting/account_test.go b/routers/web/user/setting/account_test.go
new file mode 100644
index 0000000..9fdc5e4
--- /dev/null
+++ b/routers/web/user/setting/account_test.go
@@ -0,0 +1,101 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/contexttest"
+ "code.gitea.io/gitea/services/forms"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestChangePassword(t *testing.T) {
+ oldPassword := "password"
+ setting.MinPasswordLength = 6
+ pcALL := []string{"lower", "upper", "digit", "spec"}
+ pcLUN := []string{"lower", "upper", "digit"}
+ pcLU := []string{"lower", "upper"}
+
+ for _, req := range []struct {
+ OldPassword string
+ NewPassword string
+ Retype string
+ Message string
+ PasswordComplexity []string
+ }{
+ {
+ OldPassword: oldPassword,
+ NewPassword: "Qwerty123456-",
+ Retype: "Qwerty123456-",
+ Message: "",
+ PasswordComplexity: pcALL,
+ },
+ {
+ OldPassword: oldPassword,
+ NewPassword: "12345",
+ Retype: "12345",
+ Message: "auth.password_too_short",
+ PasswordComplexity: pcALL,
+ },
+ {
+ OldPassword: "12334",
+ NewPassword: "123456",
+ Retype: "123456",
+ Message: "settings.password_incorrect",
+ PasswordComplexity: pcALL,
+ },
+ {
+ OldPassword: oldPassword,
+ NewPassword: "123456",
+ Retype: "12345",
+ Message: "form.password_not_match",
+ PasswordComplexity: pcALL,
+ },
+ {
+ OldPassword: oldPassword,
+ NewPassword: "Qwerty",
+ Retype: "Qwerty",
+ Message: "form.password_complexity",
+ PasswordComplexity: pcALL,
+ },
+ {
+ OldPassword: oldPassword,
+ NewPassword: "Qwerty",
+ Retype: "Qwerty",
+ Message: "form.password_complexity",
+ PasswordComplexity: pcLUN,
+ },
+ {
+ OldPassword: oldPassword,
+ NewPassword: "QWERTY",
+ Retype: "QWERTY",
+ Message: "form.password_complexity",
+ PasswordComplexity: pcLU,
+ },
+ } {
+ t.Run(req.OldPassword+"__"+req.NewPassword, func(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ setting.PasswordComplexity = req.PasswordComplexity
+ ctx, _ := contexttest.MockContext(t, "user/settings/security")
+ contexttest.LoadUser(t, ctx, 2)
+ contexttest.LoadRepo(t, ctx, 1)
+
+ web.SetForm(ctx, &forms.ChangePasswordForm{
+ OldPassword: req.OldPassword,
+ Password: req.NewPassword,
+ Retype: req.Retype,
+ })
+ AccountPost(ctx)
+
+ assert.Contains(t, ctx.Flash.ErrorMsg, req.Message)
+ assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ })
+ }
+}
diff --git a/routers/web/user/setting/adopt.go b/routers/web/user/setting/adopt.go
new file mode 100644
index 0000000..171c193
--- /dev/null
+++ b/routers/web/user/setting/adopt.go
@@ -0,0 +1,64 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/context"
+ repo_service "code.gitea.io/gitea/services/repository"
+)
+
+// AdoptOrDeleteRepository adopts or deletes a repository
+func AdoptOrDeleteRepository(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.adopt")
+ ctx.Data["PageIsSettingsRepos"] = true
+ allowAdopt := ctx.IsUserSiteAdmin() || setting.Repository.AllowAdoptionOfUnadoptedRepositories
+ ctx.Data["allowAdopt"] = allowAdopt
+ allowDelete := ctx.IsUserSiteAdmin() || setting.Repository.AllowDeleteOfUnadoptedRepositories
+ ctx.Data["allowDelete"] = allowDelete
+
+ dir := ctx.FormString("id")
+ action := ctx.FormString("action")
+
+ ctxUser := ctx.Doer
+ root := user_model.UserPath(ctxUser.LowerName)
+
+ // Check that the directory is not already registered as a repository.
+ has, err := repo_model.IsRepositoryModelExist(ctx, ctxUser, dir)
+ if err != nil {
+ ctx.ServerError("IsRepositoryExist", err)
+ return
+ }
+
+ isDir, err := util.IsDir(filepath.Join(root, dir+".git"))
+ if err != nil {
+ ctx.ServerError("IsDir", err)
+ return
+ }
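+ // Only directories that exist on disk but are not registered as repositories can be adopted or deleted.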
+ if has || !isDir {
+ // Already a repository or not a directory on disk: do nothing and fall through to the redirect below.
+ } else if action == "adopt" && allowAdopt {
+ if _, err := repo_service.AdoptRepository(ctx, ctxUser, ctxUser, repo_service.CreateRepoOptions{
+ Name: dir,
+ IsPrivate: true,
+ }); err != nil {
+ ctx.ServerError("repository.AdoptRepository", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.adopt_preexisting_success", dir))
+ } else if action == "delete" && allowDelete {
+ if err := repo_service.DeleteUnadoptedRepository(ctx, ctxUser, ctxUser, dir); err != nil {
+ ctx.ServerError("repository.AdoptRepository", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("repo.delete_preexisting_success", dir))
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/repos")
+}
diff --git a/routers/web/user/setting/applications.go b/routers/web/user/setting/applications.go
new file mode 100644
index 0000000..24ebf9b
--- /dev/null
+++ b/routers/web/user/setting/applications.go
@@ -0,0 +1,115 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplSettingsApplications base.TplName = "user/settings/applications"
+)
+
+// Applications renders the access token management page
+func Applications(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.applications")
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ loadApplicationsData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsApplications)
+}
+
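+ // If a file was uploaded, validate its size and content type before storing it as the custom avatar.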
+// ApplicationsPost responds to a request to add a user access token
+func ApplicationsPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.NewAccessTokenForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ if ctx.HasError() {
+ loadApplicationsData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsApplications)
+ return
+ }
+
+ scope, err := form.GetScope()
+ if err != nil {
+ ctx.ServerError("GetScope", err)
+ return
+ }
+ t := &auth_model.AccessToken{
+ UID: ctx.Doer.ID,
+ Name: form.Name,
+ Scope: scope,
+ }
+
+ exist, err := auth_model.AccessTokenByNameExists(ctx, t)
+ if err != nil {
+ ctx.ServerError("AccessTokenByNameExists", err)
+ return
+ }
+ if exist {
+ ctx.Flash.Error(ctx.Tr("settings.generate_token_name_duplicate", t.Name))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/applications")
+ return
+ }
+
+ if err := auth_model.NewAccessToken(ctx, t); err != nil {
+ ctx.ServerError("NewAccessToken", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.generate_token_success"))
+ ctx.Flash.Info(t.Token)
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/applications")
+}
+
+// DeleteApplication responds to a request to delete a user's access token
+func DeleteApplication(ctx *context.Context) {
+ if err := auth_model.DeleteAccessTokenByID(ctx, ctx.FormInt64("id"), ctx.Doer.ID); err != nil {
+ ctx.Flash.Error("DeleteAccessTokenByID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.delete_token_success"))
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/applications")
+}
+
+func loadApplicationsData(ctx *context.Context) {
+ ctx.Data["AccessTokenScopePublicOnly"] = auth_model.AccessTokenScopePublicOnly
+ tokens, err := db.Find[auth_model.AccessToken](ctx, auth_model.ListAccessTokensOptions{UserID: ctx.Doer.ID})
+ if err != nil {
+ ctx.ServerError("ListAccessTokens", err)
+ return
+ }
+ ctx.Data["Tokens"] = tokens
+ ctx.Data["EnableOAuth2"] = setting.OAuth2.Enabled
+ ctx.Data["IsAdmin"] = ctx.Doer.IsAdmin
+ if setting.OAuth2.Enabled {
+ ctx.Data["Applications"], err = db.Find[auth_model.OAuth2Application](ctx, auth_model.FindOAuth2ApplicationsOptions{
+ OwnerID: ctx.Doer.ID,
+ })
+ if err != nil {
+ ctx.ServerError("GetOAuth2ApplicationsByUserID", err)
+ return
+ }
+ ctx.Data["Grants"], err = auth_model.GetOAuth2GrantsByUserID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetOAuth2GrantsByUserID", err)
+ return
+ }
+ ctx.Data["EnableAdditionalGrantScopes"] = setting.OAuth2.EnableAdditionalGrantScopes
+ }
+}
diff --git a/routers/web/user/setting/blocked_users.go b/routers/web/user/setting/blocked_users.go
new file mode 100644
index 0000000..3f35b2e
--- /dev/null
+++ b/routers/web/user/setting/blocked_users.go
@@ -0,0 +1,46 @@
+// Copyright 2023 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsBlockedUsers base.TplName = "user/settings/blocked_users"
+)
+
+// BlockedUsers renders the blocked users list page.
+func BlockedUsers(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.blocked_users")
+ ctx.Data["PageIsBlockedUsers"] = true
+ ctx.Data["BaseLink"] = setting.AppSubURL + "/user/settings/blocked_users"
+ ctx.Data["BaseLinkNew"] = setting.AppSubURL + "/user/settings/blocked_users"
+
+ blockedUsers, err := user_model.ListBlockedUsers(ctx, ctx.Doer.ID, db.ListOptions{})
+ if err != nil {
+ ctx.ServerError("ListBlockedUsers", err)
+ return
+ }
+
+ ctx.Data["BlockedUsers"] = blockedUsers
+ ctx.HTML(http.StatusOK, tplSettingsBlockedUsers)
+}
+
+// UnblockUser unblocks a particular user for the doer.
+func UnblockUser(ctx *context.Context) {
+ if err := user_model.UnblockUser(ctx, ctx.Doer.ID, ctx.FormInt64("user_id")); err != nil {
+ ctx.ServerError("UnblockUser", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.user_unblock_success"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/blocked_users")
+}
diff --git a/routers/web/user/setting/keys.go b/routers/web/user/setting/keys.go
new file mode 100644
index 0000000..9462be7
--- /dev/null
+++ b/routers/web/user/setting/keys.go
@@ -0,0 +1,338 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/http"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ asymkey_service "code.gitea.io/gitea/services/asymkey"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+const (
+ tplSettingsKeys base.TplName = "user/settings/keys"
+)
+
+// Keys renders the user's SSH/GPG public keys page
+func Keys(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.ssh_gpg_keys")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+ ctx.Data["BuiltinSSH"] = setting.SSH.StartBuiltinServer
+ ctx.Data["AllowPrincipals"] = setting.SSH.AuthorizedPrincipalsEnabled
+
+ loadKeysData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsKeys)
+}
+
+// KeysPost responds to requests to change the user's SSH/GPG keys
+func KeysPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AddKeyForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsKeys"] = true
+ ctx.Data["DisableSSH"] = setting.SSH.Disabled
+ ctx.Data["BuiltinSSH"] = setting.SSH.StartBuiltinServer
+ ctx.Data["AllowPrincipals"] = setting.SSH.AuthorizedPrincipalsEnabled
+
+ if ctx.HasError() {
+ loadKeysData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsKeys)
+ return
+ }
+ switch form.Type {
+ case "principal":
+ content, err := asymkey_model.CheckPrincipalKeyString(ctx, ctx.Doer, form.Content)
+ if err != nil {
+ if db.IsErrSSHDisabled(err) {
+ ctx.Flash.Info(ctx.Tr("settings.ssh_disabled"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("form.invalid_ssh_principal", err.Error()))
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ return
+ }
+ if _, err = asymkey_model.AddPrincipalKey(ctx, ctx.Doer.ID, content, 0); err != nil {
+ ctx.Data["HasPrincipalError"] = true
+ switch {
+ case asymkey_model.IsErrKeyAlreadyExist(err), asymkey_model.IsErrKeyNameAlreadyUsed(err):
+ loadKeysData(ctx)
+
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_principal_been_used"), tplSettingsKeys, &form)
+ default:
+ ctx.ServerError("AddPrincipalKey", err)
+ }
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.add_principal_success", form.Content))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ case "gpg":
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
+ ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ return
+ }
+
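+ // Accept a signature made with either the current or the previous verification token, so a token that rotated between page load and submission still verifies.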
+ token := asymkey_model.VerificationToken(ctx.Doer, 1)
+ lastToken := asymkey_model.VerificationToken(ctx.Doer, 0)
+
+ keys, err := asymkey_model.AddGPGKey(ctx, ctx.Doer.ID, form.Content, token, form.Signature)
+ if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) {
+ keys, err = asymkey_model.AddGPGKey(ctx, ctx.Doer.ID, form.Content, lastToken, form.Signature)
+ }
+ if err != nil {
+ ctx.Data["HasGPGError"] = true
+ switch {
+ case asymkey_model.IsErrGPGKeyParsing(err):
+ ctx.Flash.Error(ctx.Tr("form.invalid_gpg_key", err.Error()))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ case asymkey_model.IsErrGPGKeyIDAlreadyUsed(err):
+ loadKeysData(ctx)
+
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.gpg_key_id_used"), tplSettingsKeys, &form)
+ case asymkey_model.IsErrGPGInvalidTokenSignature(err):
+ loadKeysData(ctx)
+ ctx.Data["Err_Content"] = true
+ ctx.Data["Err_Signature"] = true
+ keyID := err.(asymkey_model.ErrGPGInvalidTokenSignature).ID
+ ctx.Data["KeyID"] = keyID
+ ctx.Data["PaddedKeyID"] = asymkey_model.PaddedKeyID(keyID)
+ ctx.RenderWithErr(ctx.Tr("settings.gpg_invalid_token_signature"), tplSettingsKeys, &form)
+ case asymkey_model.IsErrGPGNoEmailFound(err):
+ loadKeysData(ctx)
+
+ ctx.Data["Err_Content"] = true
+ ctx.Data["Err_Signature"] = true
+ keyID := err.(asymkey_model.ErrGPGNoEmailFound).ID
+ ctx.Data["KeyID"] = keyID
+ ctx.Data["PaddedKeyID"] = asymkey_model.PaddedKeyID(keyID)
+ ctx.RenderWithErr(ctx.Tr("settings.gpg_no_key_email_found"), tplSettingsKeys, &form)
+ default:
+ ctx.ServerError("AddPublicKey", err)
+ }
+ return
+ }
+ keyIDs := ""
+ for _, key := range keys {
+ keyIDs += key.KeyID
+ keyIDs += ", "
+ }
+ if len(keyIDs) > 0 {
+ keyIDs = keyIDs[:len(keyIDs)-2]
+ }
+ ctx.Flash.Success(ctx.Tr("settings.add_gpg_key_success", keyIDs))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ case "verify_gpg":
+ token := asymkey_model.VerificationToken(ctx.Doer, 1)
+ lastToken := asymkey_model.VerificationToken(ctx.Doer, 0)
+
+ keyID, err := asymkey_model.VerifyGPGKey(ctx, ctx.Doer.ID, form.KeyID, token, form.Signature)
+ if err != nil && asymkey_model.IsErrGPGInvalidTokenSignature(err) {
+ keyID, err = asymkey_model.VerifyGPGKey(ctx, ctx.Doer.ID, form.KeyID, lastToken, form.Signature)
+ }
+ if err != nil {
+ ctx.Data["HasGPGVerifyError"] = true
+ switch {
+ case asymkey_model.IsErrGPGInvalidTokenSignature(err):
+ loadKeysData(ctx)
+ ctx.Data["VerifyingID"] = form.KeyID
+ ctx.Data["Err_Signature"] = true
+ keyID := err.(asymkey_model.ErrGPGInvalidTokenSignature).ID
+ ctx.Data["KeyID"] = keyID
+ ctx.Data["PaddedKeyID"] = asymkey_model.PaddedKeyID(keyID)
+ ctx.RenderWithErr(ctx.Tr("settings.gpg_invalid_token_signature"), tplSettingsKeys, &form)
+ default:
+ ctx.ServerError("VerifyGPG", err)
+ }
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.verify_gpg_key_success", keyID))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ case "ssh":
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
+ ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ return
+ }
+
+ content, err := asymkey_model.CheckPublicKeyString(form.Content)
+ if err != nil {
+ if db.IsErrSSHDisabled(err) {
+ ctx.Flash.Info(ctx.Tr("settings.ssh_disabled"))
+ } else if asymkey_model.IsErrKeyUnableVerify(err) {
+ ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key"))
+ } else if err == asymkey_model.ErrKeyIsPrivate {
+ ctx.Flash.Error(ctx.Tr("form.must_use_public_key"))
+ } else {
+ ctx.Flash.Error(ctx.Tr("form.invalid_ssh_key", err.Error()))
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ return
+ }
+
+ if _, err = asymkey_model.AddPublicKey(ctx, ctx.Doer.ID, form.Title, content, 0); err != nil {
+ ctx.Data["HasSSHError"] = true
+ switch {
+ case asymkey_model.IsErrKeyAlreadyExist(err):
+ loadKeysData(ctx)
+
+ ctx.Data["Err_Content"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_key_been_used"), tplSettingsKeys, &form)
+ case asymkey_model.IsErrKeyNameAlreadyUsed(err):
+ loadKeysData(ctx)
+
+ ctx.Data["Err_Title"] = true
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_key_name_used"), tplSettingsKeys, &form)
+ case asymkey_model.IsErrKeyUnableVerify(err):
+ ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ default:
+ ctx.ServerError("AddPublicKey", err)
+ }
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.add_key_success", form.Title))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ case "verify_ssh":
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
+ ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ return
+ }
+
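+ // As with GPG verification, fall back to the previous verification token if the current one does not match.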
+ token := asymkey_model.VerificationToken(ctx.Doer, 1)
+ lastToken := asymkey_model.VerificationToken(ctx.Doer, 0)
+
+ fingerprint, err := asymkey_model.VerifySSHKey(ctx, ctx.Doer.ID, form.Fingerprint, token, form.Signature)
+ if err != nil && asymkey_model.IsErrSSHInvalidTokenSignature(err) {
+ fingerprint, err = asymkey_model.VerifySSHKey(ctx, ctx.Doer.ID, form.Fingerprint, lastToken, form.Signature)
+ }
+ if err != nil {
+ ctx.Data["HasSSHVerifyError"] = true
+ switch {
+ case asymkey_model.IsErrSSHInvalidTokenSignature(err):
+ loadKeysData(ctx)
+ ctx.Data["Err_Signature"] = true
+ ctx.Data["Fingerprint"] = err.(asymkey_model.ErrSSHInvalidTokenSignature).Fingerprint
+ ctx.RenderWithErr(ctx.Tr("settings.ssh_invalid_token_signature"), tplSettingsKeys, &form)
+ default:
+ ctx.ServerError("VerifySSH", err)
+ }
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.verify_ssh_key_success", fingerprint))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+
+ default:
+ ctx.Flash.Warning("Function not implemented")
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ }
+}
+
+// DeleteKey responds to a request to delete a user's SSH/GPG key
+func DeleteKey(ctx *context.Context) {
+ switch ctx.FormString("type") {
+ case "gpg":
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
+ ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ return
+ }
+ if err := asymkey_model.DeleteGPGKey(ctx, ctx.Doer, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteGPGKey: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.gpg_key_deletion_success"))
+ }
+ case "ssh":
+ if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
+ ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ return
+ }
+
+ keyID := ctx.FormInt64("id")
+ external, err := asymkey_model.PublicKeyIsExternallyManaged(ctx, keyID)
+ if err != nil {
+ ctx.ServerError("sshKeysExternalManaged", err)
+ return
+ }
+ if external {
+ ctx.Flash.Error(ctx.Tr("settings.ssh_externally_managed"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ return
+ }
+ if err := asymkey_service.DeletePublicKey(ctx, ctx.Doer, keyID); err != nil {
+ ctx.Flash.Error("DeletePublicKey: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.ssh_key_deletion_success"))
+ }
+ case "principal":
+ if err := asymkey_service.DeletePublicKey(ctx, ctx.Doer, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeletePublicKey: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.ssh_principal_deletion_success"))
+ }
+ default:
+ ctx.Flash.Warning("Function not implemented")
+ ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
+ }
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/keys")
+}
+
+func loadKeysData(ctx *context.Context) {
+ keys, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ OwnerID: ctx.Doer.ID,
+ NotKeytype: asymkey_model.KeyTypePrincipal,
+ })
+ if err != nil {
+ ctx.ServerError("ListPublicKeys", err)
+ return
+ }
+ ctx.Data["Keys"] = keys
+
+ externalKeys, err := asymkey_model.PublicKeysAreExternallyManaged(ctx, keys)
+ if err != nil {
+ ctx.ServerError("ListPublicKeys", err)
+ return
+ }
+ ctx.Data["ExternalKeys"] = externalKeys
+
+ gpgkeys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: ctx.Doer.ID,
+ })
+ if err != nil {
+ ctx.ServerError("ListGPGKeys", err)
+ return
+ }
+ if err := asymkey_model.GPGKeyList(gpgkeys).LoadSubKeys(ctx); err != nil {
+ ctx.ServerError("LoadSubKeys", err)
+ return
+ }
+ ctx.Data["GPGKeys"] = gpgkeys
+ tokenToSign := asymkey_model.VerificationToken(ctx.Doer, 1)
+
+ // Expose the token that the user must sign to prove ownership of a key being verified.
+ ctx.Data["TokenToSign"] = tokenToSign
+
+ principals, err := db.Find[asymkey_model.PublicKey](ctx, asymkey_model.FindPublicKeyOptions{
+ ListOptions: db.ListOptionsAll,
+ OwnerID: ctx.Doer.ID,
+ KeyTypes: []asymkey_model.KeyType{asymkey_model.KeyTypePrincipal},
+ })
+ if err != nil {
+ ctx.ServerError("ListPrincipalKeys", err)
+ return
+ }
+ ctx.Data["Principals"] = principals
+
+ ctx.Data["VerifyingID"] = ctx.FormString("verify_gpg")
+ ctx.Data["VerifyingFingerprint"] = ctx.FormString("verify_ssh")
+ ctx.Data["UserDisabledFeatures"] = user_model.DisabledFeaturesWithLoginType(ctx.Doer)
+}
diff --git a/routers/web/user/setting/main_test.go b/routers/web/user/setting/main_test.go
new file mode 100644
index 0000000..e398208
--- /dev/null
+++ b/routers/web/user/setting/main_test.go
@@ -0,0 +1,14 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
diff --git a/routers/web/user/setting/oauth2.go b/routers/web/user/setting/oauth2.go
new file mode 100644
index 0000000..1f485e0
--- /dev/null
+++ b/routers/web/user/setting/oauth2.go
@@ -0,0 +1,68 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsOAuthApplicationEdit base.TplName = "user/settings/applications_oauth2_edit"
+)
+
+func newOAuth2CommonHandlers(userID int64) *OAuth2CommonHandlers {
+ return &OAuth2CommonHandlers{
+ OwnerID: userID,
+ BasePathList: setting.AppSubURL + "/user/settings/applications",
+ BasePathEditPrefix: setting.AppSubURL + "/user/settings/applications/oauth2",
+ TplAppEdit: tplSettingsOAuthApplicationEdit,
+ }
+}
+
+// OAuthApplicationsPost responds to a request to add an OAuth2 application
+func OAuthApplicationsPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.AddApp(ctx)
+}
+
+// OAuthApplicationsEdit responds to a request to edit an OAuth2 application
+func OAuthApplicationsEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.EditSave(ctx)
+}
+
+// OAuthApplicationsRegenerateSecret handles the post request for regenerating the secret
+func OAuthApplicationsRegenerateSecret(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsApplications"] = true
+
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.RegenerateSecret(ctx)
+}
+
+// OAuth2ApplicationShow displays the given application
+func OAuth2ApplicationShow(ctx *context.Context) {
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.EditShow(ctx)
+}
+
+// DeleteOAuth2Application deletes the given oauth2 application
+func DeleteOAuth2Application(ctx *context.Context) {
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.DeleteApp(ctx)
+}
+
+// RevokeOAuth2Grant revokes the grant with the given id
+func RevokeOAuth2Grant(ctx *context.Context) {
+ oa := newOAuth2CommonHandlers(ctx.Doer.ID)
+ oa.RevokeGrant(ctx)
+}
diff --git a/routers/web/user/setting/oauth2_common.go b/routers/web/user/setting/oauth2_common.go
new file mode 100644
index 0000000..85d1e82
--- /dev/null
+++ b/routers/web/user/setting/oauth2_common.go
@@ -0,0 +1,163 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/http"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ shared_user "code.gitea.io/gitea/routers/web/shared/user"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+type OAuth2CommonHandlers struct {
+ OwnerID int64 // 0 for instance-wide applications, otherwise the owning organization or user ID
+ BasePathList string // base URL of the application list page, e.g. "/user/settings/applications"
+ BasePathEditPrefix string // base URL of the application edit page; the application ID is appended, e.g. "/user/settings/applications/oauth2"
+ TplAppEdit base.TplName // the template for the application edit page
+}
+
+func (oa *OAuth2CommonHandlers) renderEditPage(ctx *context.Context) {
+ app := ctx.Data["App"].(*auth.OAuth2Application)
+ ctx.Data["FormActionPath"] = fmt.Sprintf("%s/%d", oa.BasePathEditPrefix, app.ID)
+
+ if ctx.ContextUser != nil && ctx.ContextUser.IsOrganization() {
+ if err := shared_user.LoadHeaderCount(ctx); err != nil {
+ ctx.ServerError("LoadHeaderCount", err)
+ return
+ }
+ }
+
+ ctx.HTML(http.StatusOK, oa.TplAppEdit)
+}
+
+// AddApp adds an oauth2 application
+func (oa *OAuth2CommonHandlers) AddApp(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditOAuth2ApplicationForm)
+ if ctx.HasError() {
+ ctx.Flash.Error(ctx.GetErrMsg())
+ // go to the application list page
+ ctx.Redirect(oa.BasePathList)
+ return
+ }
+
+ // TODO validate redirect URI
+ app, err := auth.CreateOAuth2Application(ctx, auth.CreateOAuth2ApplicationOptions{
+ Name: form.Name,
+ RedirectURIs: util.SplitTrimSpace(form.RedirectURIs, "\n"),
+ UserID: oa.OwnerID,
+ ConfidentialClient: form.ConfidentialClient,
+ })
+ if err != nil {
+ ctx.ServerError("CreateOAuth2Application", err)
+ return
+ }
+
+ // render the edit page with secret
+ ctx.Flash.Success(ctx.Tr("settings.create_oauth2_application_success"), true)
+ ctx.Data["App"] = app
+ ctx.Data["ClientSecret"], err = app.GenerateClientSecret(ctx)
+ if err != nil {
+ ctx.ServerError("GenerateClientSecret", err)
+ return
+ }
+
+ oa.renderEditPage(ctx)
+}
+
+// EditShow displays the given application
+func (oa *OAuth2CommonHandlers) EditShow(ctx *context.Context) {
+ app, err := auth.GetOAuth2ApplicationByID(ctx, ctx.ParamsInt64("id"))
+ if err != nil {
+ if auth.IsErrOAuthApplicationNotFound(err) {
+ ctx.NotFound("Application not found", err)
+ return
+ }
+ ctx.ServerError("GetOAuth2ApplicationByID", err)
+ return
+ }
+ if app.UID != oa.OwnerID {
+ ctx.NotFound("Application not found", nil)
+ return
+ }
+ ctx.Data["App"] = app
+ oa.renderEditPage(ctx)
+}
+
+// EditSave saves the oauth2 application
+func (oa *OAuth2CommonHandlers) EditSave(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.EditOAuth2ApplicationForm)
+
+ if ctx.HasError() {
+ oa.renderEditPage(ctx)
+ return
+ }
+
+ // TODO validate redirect URI
+ var err error
+ if ctx.Data["App"], err = auth.UpdateOAuth2Application(ctx, auth.UpdateOAuth2ApplicationOptions{
+ ID: ctx.ParamsInt64("id"),
+ Name: form.Name,
+ RedirectURIs: util.SplitTrimSpace(form.RedirectURIs, "\n"),
+ UserID: oa.OwnerID,
+ ConfidentialClient: form.ConfidentialClient,
+ }); err != nil {
+ ctx.ServerError("UpdateOAuth2Application", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.update_oauth2_application_success"))
+ ctx.Redirect(oa.BasePathList)
+}
+
+// RegenerateSecret regenerates the secret
+func (oa *OAuth2CommonHandlers) RegenerateSecret(ctx *context.Context) {
+ app, err := auth.GetOAuth2ApplicationByID(ctx, ctx.ParamsInt64("id"))
+ if err != nil {
+ if auth.IsErrOAuthApplicationNotFound(err) {
+ ctx.NotFound("Application not found", err)
+ return
+ }
+ ctx.ServerError("GetOAuth2ApplicationByID", err)
+ return
+ }
+ if app.UID != oa.OwnerID {
+ ctx.NotFound("Application not found", nil)
+ return
+ }
+ ctx.Data["App"] = app
+ ctx.Data["ClientSecret"], err = app.GenerateClientSecret(ctx)
+ if err != nil {
+ ctx.ServerError("GenerateClientSecret", err)
+ return
+ }
+ ctx.Flash.Success(ctx.Tr("settings.update_oauth2_application_success"), true)
+ oa.renderEditPage(ctx)
+}
+
+// DeleteApp deletes the given oauth2 application
+func (oa *OAuth2CommonHandlers) DeleteApp(ctx *context.Context) {
+ if err := auth.DeleteOAuth2Application(ctx, ctx.ParamsInt64("id"), oa.OwnerID); err != nil {
+ ctx.ServerError("DeleteOAuth2Application", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.remove_oauth2_application_success"))
+ ctx.JSONRedirect(oa.BasePathList)
+}
+
+// RevokeGrant revokes the grant
+func (oa *OAuth2CommonHandlers) RevokeGrant(ctx *context.Context) {
+ if err := auth.RevokeOAuth2Grant(ctx, ctx.ParamsInt64("grantId"), oa.OwnerID); err != nil {
+ ctx.ServerError("RevokeOAuth2Grant", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.revoke_oauth2_grant_success"))
+ ctx.JSONRedirect(oa.BasePathList)
+}
diff --git a/routers/web/user/setting/packages.go b/routers/web/user/setting/packages.go
new file mode 100644
index 0000000..4132659
--- /dev/null
+++ b/routers/web/user/setting/packages.go
@@ -0,0 +1,119 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ chef_module "code.gitea.io/gitea/modules/packages/chef"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+ shared "code.gitea.io/gitea/routers/web/shared/packages"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsPackages base.TplName = "user/settings/packages"
+ tplSettingsPackagesRuleEdit base.TplName = "user/settings/packages_cleanup_rules_edit"
+ tplSettingsPackagesRulePreview base.TplName = "user/settings/packages_cleanup_rules_preview"
+)
+
+func Packages(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.SetPackagesContext(ctx, ctx.Doer)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackages)
+}
+
+func PackagesRuleAdd(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.SetRuleAddContext(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRuleEdit)
+}
+
+func PackagesRuleEdit(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.SetRuleEditContext(ctx, ctx.Doer)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRuleEdit)
+}
+
+func PackagesRuleAddPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.PerformRuleAddPost(
+ ctx,
+ ctx.Doer,
+ setting.AppSubURL+"/user/settings/packages",
+ tplSettingsPackagesRuleEdit,
+ )
+}
+
+func PackagesRuleEditPost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.PerformRuleEditPost(
+ ctx,
+ ctx.Doer,
+ setting.AppSubURL+"/user/settings/packages",
+ tplSettingsPackagesRuleEdit,
+ )
+}
+
+func PackagesRulePreview(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.SetRulePreviewContext(ctx, ctx.Doer)
+
+ ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview)
+}
+
+func InitializeCargoIndex(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.InitializeCargoIndex(ctx, ctx.Doer)
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
+}
+
+func RebuildCargoIndex(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("packages.title")
+ ctx.Data["PageIsSettingsPackages"] = true
+
+ shared.RebuildCargoIndex(ctx, ctx.Doer)
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/packages")
+}
+
+func RegenerateChefKeyPair(ctx *context.Context) {
+ priv, pub, err := util.GenerateKeyPair(chef_module.KeyBits)
+ if err != nil {
+ ctx.ServerError("GenerateKeyPair", err)
+ return
+ }
+
+ if err := user_model.SetUserSetting(ctx, ctx.Doer.ID, chef_module.SettingPublicPem, pub); err != nil {
+ ctx.ServerError("SetUserSetting", err)
+ return
+ }
+
+ ctx.ServeContent(strings.NewReader(priv), &context.ServeHeaderOptions{
+ ContentType: "application/x-pem-file",
+ Filename: ctx.Doer.Name + ".priv",
+ })
+}
diff --git a/routers/web/user/setting/profile.go b/routers/web/user/setting/profile.go
new file mode 100644
index 0000000..907f0f5
--- /dev/null
+++ b/routers/web/user/setting/profile.go
@@ -0,0 +1,433 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "io"
+ "math/big"
+ "net/http"
+ "os"
+ "path/filepath"
+ "slices"
+ "strings"
+
+ "code.gitea.io/gitea/models/avatars"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/organization"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/translation"
+ "code.gitea.io/gitea/modules/typesniffer"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ user_service "code.gitea.io/gitea/services/user"
+)
+
+const (
+ tplSettingsProfile base.TplName = "user/settings/profile"
+ tplSettingsAppearance base.TplName = "user/settings/appearance"
+ tplSettingsOrganization base.TplName = "user/settings/organization"
+ tplSettingsRepositories base.TplName = "user/settings/repos"
+)
+
+// must be kept in sync with `web_src/js/features/user-settings.js`
+var recognisedPronouns = []string{"", "he/him", "she/her", "they/them", "it/its", "any pronouns"}
+
+// Profile renders the user's profile settings page
+func Profile(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.profile")
+ ctx.Data["PageIsSettingsProfile"] = true
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+ ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
+ ctx.Data["PronounsAreCustom"] = !slices.Contains(recognisedPronouns, ctx.Doer.Pronouns)
+
+ ctx.HTML(http.StatusOK, tplSettingsProfile)
+}
+
+// ProfilePost responds to a request to change the user's profile
+func ProfilePost(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsProfile"] = true
+ ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
+ ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
+ ctx.Data["PronounsAreCustom"] = !slices.Contains(recognisedPronouns, ctx.Doer.Pronouns)
+
+ if ctx.HasError() {
+ ctx.HTML(http.StatusOK, tplSettingsProfile)
+ return
+ }
+
+ form := web.GetForm(ctx).(*forms.UpdateProfileForm)
+
+ if form.Name != "" {
+ if err := user_service.RenameUser(ctx, ctx.Doer, form.Name); err != nil {
+ switch {
+ case user_model.IsErrUserIsNotLocal(err):
+ ctx.Flash.Error(ctx.Tr("form.username_change_not_local_user"))
+ case user_model.IsErrUserAlreadyExist(err):
+ ctx.Flash.Error(ctx.Tr("form.username_been_taken"))
+ case db.IsErrNameReserved(err):
+ ctx.Flash.Error(ctx.Tr("user.form.name_reserved", form.Name))
+ case db.IsErrNamePatternNotAllowed(err):
+ ctx.Flash.Error(ctx.Tr("user.form.name_pattern_not_allowed", form.Name))
+ case db.IsErrNameCharsNotAllowed(err):
+ ctx.Flash.Error(ctx.Tr("user.form.name_chars_not_allowed", form.Name))
+ default:
+ ctx.ServerError("RenameUser", err)
+ return
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/settings")
+ return
+ }
+ }
+
+ opts := &user_service.UpdateOptions{
+ FullName: optional.Some(form.FullName),
+ KeepEmailPrivate: optional.Some(form.KeepEmailPrivate),
+ Description: optional.Some(form.Biography),
+ Pronouns: optional.Some(form.Pronouns),
+ Website: optional.Some(form.Website),
+ Location: optional.Some(form.Location),
+ Visibility: optional.Some(form.Visibility),
+ KeepActivityPrivate: optional.Some(form.KeepActivityPrivate),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ log.Trace("User settings updated: %s", ctx.Doer.Name)
+ ctx.Flash.Success(ctx.Tr("settings.update_profile_success"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings")
+}
+
+// UpdateAvatarSetting updates the user's avatar settings
+// FIXME: limit size.
+func UpdateAvatarSetting(ctx *context.Context, form *forms.AvatarForm, ctxUser *user_model.User) error {
+ ctxUser.UseCustomAvatar = form.Source == forms.AvatarLocal
+ if len(form.Gravatar) > 0 {
+ if form.Avatar != nil {
+ ctxUser.Avatar = avatars.HashEmail(form.Gravatar)
+ } else {
+ ctxUser.Avatar = ""
+ }
+ ctxUser.AvatarEmail = form.Gravatar
+ }
+
+ if form.Avatar != nil && form.Avatar.Filename != "" {
+ fr, err := form.Avatar.Open()
+ if err != nil {
+ return fmt.Errorf("Avatar.Open: %w", err)
+ }
+ defer fr.Close()
+
+ if form.Avatar.Size > setting.Avatar.MaxFileSize {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_is_too_big", form.Avatar.Size/1024, setting.Avatar.MaxFileSize/1024))
+ }
+
+ data, err := io.ReadAll(fr)
+ if err != nil {
+ return fmt.Errorf("io.ReadAll: %w", err)
+ }
+
+ st := typesniffer.DetectContentType(data)
+ if !(st.IsImage() && !st.IsSvgImage()) {
+ return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_not_a_image"))
+ }
+ if err = user_service.UploadAvatar(ctx, ctxUser, data); err != nil {
+ return fmt.Errorf("UploadAvatar: %w", err)
+ }
+ } else if ctxUser.UseCustomAvatar && ctxUser.Avatar == "" {
+ // No avatar was uploaded, but the custom avatar option was enabled;
+ // generate a random one when needed.
+ if err := user_model.GenerateRandomAvatar(ctx, ctxUser); err != nil {
+ log.Error("GenerateRandomAvatar[%d]: %v", ctxUser.ID, err)
+ }
+ }
+
+ if err := user_model.UpdateUserCols(ctx, ctxUser, "avatar", "avatar_email", "use_custom_avatar"); err != nil {
+ return fmt.Errorf("UpdateUserCols: %w", err)
+ }
+
+ return nil
+}
+
+// AvatarPost responds to a request to change the user's avatar
+func AvatarPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AvatarForm)
+ if err := UpdateAvatarSetting(ctx, form, ctx.Doer); err != nil {
+ ctx.Flash.Error(err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.update_avatar_success"))
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings")
+}
+
+// DeleteAvatar deletes the user's avatar and redirects back to the settings page
+func DeleteAvatar(ctx *context.Context) {
+ if err := user_service.DeleteAvatar(ctx, ctx.Doer); err != nil {
+ ctx.Flash.Error(err.Error())
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings")
+}
+
+// Organization renders all of the user's organizations
+func Organization(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.organization")
+ ctx.Data["PageIsSettingsOrganization"] = true
+
+ opts := organization.FindOrgOptions{
+ ListOptions: db.ListOptions{
+ PageSize: setting.UI.Admin.UserPagingNum,
+ Page: ctx.FormInt("page"),
+ },
+ UserID: ctx.Doer.ID,
+ IncludePrivate: ctx.IsSigned,
+ }
+
+ if opts.Page <= 0 {
+ opts.Page = 1
+ }
+
+ orgs, total, err := db.FindAndCount[organization.Organization](ctx, opts)
+ if err != nil {
+ ctx.ServerError("FindOrgs", err)
+ return
+ }
+
+ ctx.Data["Orgs"] = orgs
+ pager := context.NewPagination(int(total), opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsOrganization)
+}
+
+// Repos displays a list of all of the user's repositories
+func Repos(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.repos")
+ ctx.Data["PageIsSettingsRepos"] = true
+ ctx.Data["allowAdopt"] = ctx.IsUserSiteAdmin() || setting.Repository.AllowAdoptionOfUnadoptedRepositories
+ ctx.Data["allowDelete"] = ctx.IsUserSiteAdmin() || setting.Repository.AllowDeleteOfUnadoptedRepositories
+
+ opts := db.ListOptions{
+ PageSize: setting.UI.Admin.UserPagingNum,
+ Page: ctx.FormInt("page"),
+ }
+
+ if opts.Page <= 0 {
+ opts.Page = 1
+ }
+ start := (opts.Page - 1) * opts.PageSize
+ end := start + opts.PageSize
+
+ adoptOrDelete := ctx.IsUserSiteAdmin() || (setting.Repository.AllowAdoptionOfUnadoptedRepositories && setting.Repository.AllowDeleteOfUnadoptedRepositories)
+
+ ctxUser := ctx.Doer
+ count := 0
+
+ if adoptOrDelete {
+ repoNames := make([]string, 0, setting.UI.Admin.UserPagingNum)
+ repos := map[string]*repo_model.Repository{}
+ // Walk the user's repository root on disk and collect one page of candidate directories.
+ root := user_model.UserPath(ctxUser.Name)
+ if err := filepath.WalkDir(root, func(path string, d os.DirEntry, err error) error {
+ if err != nil {
+ if os.IsNotExist(err) {
+ return nil
+ }
+ return err
+ }
+ if !d.IsDir() || path == root {
+ return nil
+ }
+ name := d.Name()
+ if !strings.HasSuffix(name, ".git") {
+ return filepath.SkipDir
+ }
+ name = name[:len(name)-4]
+ if repo_model.IsUsableRepoName(name) != nil || strings.ToLower(name) != name {
+ return filepath.SkipDir
+ }
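+ // Count every candidate for the pagination total, but only record names that fall inside the requested page window.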
+ if count >= start && count < end {
+ repoNames = append(repoNames, name)
+ }
+ count++
+ return filepath.SkipDir
+ }); err != nil {
+ ctx.ServerError("filepath.WalkDir", err)
+ return
+ }
+
+ userRepos, _, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{
+ Actor: ctxUser,
+ Private: true,
+ ListOptions: db.ListOptions{
+ Page: 1,
+ PageSize: setting.UI.Admin.UserPagingNum,
+ },
+ LowerNames: repoNames,
+ })
+ if err != nil {
+ ctx.ServerError("GetUserRepositories", err)
+ return
+ }
+ for _, repo := range userRepos {
+ if repo.IsFork {
+ if err := repo.GetBaseRepo(ctx); err != nil {
+ ctx.ServerError("GetBaseRepo", err)
+ return
+ }
+ }
+ repos[repo.LowerName] = repo
+ }
+ ctx.Data["Dirs"] = repoNames
+ ctx.Data["ReposMap"] = repos
+ } else {
+ repos, count64, err := repo_model.GetUserRepositories(ctx, &repo_model.SearchRepoOptions{Actor: ctxUser, Private: true, ListOptions: opts})
+ if err != nil {
+ ctx.ServerError("GetUserRepositories", err)
+ return
+ }
+ count = int(count64)
+
+ for i := range repos {
+ if repos[i].IsFork {
+ if err := repos[i].GetBaseRepo(ctx); err != nil {
+ ctx.ServerError("GetBaseRepo", err)
+ return
+ }
+ }
+ }
+
+ ctx.Data["Repos"] = repos
+ }
+ ctx.Data["ContextUser"] = ctxUser
+ pager := context.NewPagination(count, opts.PageSize, opts.Page, 5)
+ pager.SetDefaultParams(ctx)
+ ctx.Data["Page"] = pager
+ ctx.HTML(http.StatusOK, tplSettingsRepositories)
+}
+
+// Appearance render user's appearance settings
+func Appearance(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.appearance")
+ ctx.Data["PageIsSettingsAppearance"] = true
+
+ var hiddenCommentTypes *big.Int
+ val, err := user_model.GetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes)
+ if err != nil {
+ ctx.ServerError("GetUserSetting", err)
+ return
+ }
+ hiddenCommentTypes, _ = new(big.Int).SetString(val, 10) // we can safely ignore the failed conversion here
+
+ ctx.Data["IsCommentTypeGroupChecked"] = func(commentTypeGroup string) bool {
+ return forms.IsUserHiddenCommentTypeGroupChecked(commentTypeGroup, hiddenCommentTypes)
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsAppearance)
+}
+
+// UpdateUIThemePost updates the user's selected UI theme
+func UpdateUIThemePost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateThemeForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAppearance"] = true
+
+ if ctx.HasError() {
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+ return
+ }
+
+ if !form.IsThemeExists() {
+ ctx.Flash.Error(ctx.Tr("settings.theme_update_error"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+ return
+ }
+
+ opts := &user_service.UpdateOptions{
+ Theme: optional.Some(form.Theme),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.Flash.Error(ctx.Tr("settings.theme_update_error"))
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.theme_update_success"))
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+}
+
+// UpdateUserLang updates a user's interface language
+func UpdateUserLang(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateLanguageForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAppearance"] = true
+
+ if form.Language != "" {
+ if !util.SliceContainsString(setting.Langs, form.Language) {
+ ctx.Flash.Error(ctx.Tr("settings.update_language_not_found", form.Language))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+ return
+ }
+ }
+
+ opts := &user_service.UpdateOptions{
+ Language: optional.Some(form.Language),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ // Update the language to the one we just set
+ middleware.SetLocaleCookie(ctx.Resp, ctx.Doer.Language, 0)
+
+ log.Trace("User settings updated: %s", ctx.Doer.Name)
+ ctx.Flash.Success(translation.NewLocale(ctx.Doer.Language).TrString("settings.update_language_success"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+}
+
+// UpdateUserHints updates a user's hints settings
+func UpdateUserHints(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.UpdateHintsForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsAppearance"] = true
+
+ opts := &user_service.UpdateOptions{
+ EnableRepoUnitHints: optional.Some(form.EnableRepoUnitHints),
+ }
+ if err := user_service.UpdateUser(ctx, ctx.Doer, opts); err != nil {
+ ctx.ServerError("UpdateUser", err)
+ return
+ }
+
+ log.Trace("User settings updated: %s", ctx.Doer.Name)
+ ctx.Flash.Success(translation.NewLocale(ctx.Doer.Language).TrString("settings.update_hints_success"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+}
+
+// UpdateUserHiddenComments updates which comment types are hidden for a user
+func UpdateUserHiddenComments(ctx *context.Context) {
+ err := user_model.SetUserSetting(ctx, ctx.Doer.ID, user_model.SettingsKeyHiddenCommentTypes, forms.UserHiddenCommentTypesFromRequest(ctx).String())
+ if err != nil {
+ ctx.ServerError("SetUserSetting", err)
+ return
+ }
+
+ log.Trace("User settings updated: %s", ctx.Doer.Name)
+ ctx.Flash.Success(ctx.Tr("settings.saved_successfully"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/appearance")
+}
diff --git a/routers/web/user/setting/runner.go b/routers/web/user/setting/runner.go
new file mode 100644
index 0000000..2bb10cc
--- /dev/null
+++ b/routers/web/user/setting/runner.go
@@ -0,0 +1,13 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+func RedirectToDefaultSetting(ctx *context.Context) {
+ ctx.Redirect(setting.AppSubURL + "/user/settings/actions/runners")
+}
diff --git a/routers/web/user/setting/security/2fa.go b/routers/web/user/setting/security/2fa.go
new file mode 100644
index 0000000..a145867
--- /dev/null
+++ b/routers/web/user/setting/security/2fa.go
@@ -0,0 +1,260 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "bytes"
+ "encoding/base64"
+ "html/template"
+ "image/png"
+ "net/http"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+
+ "github.com/pquerna/otp"
+ "github.com/pquerna/otp/totp"
+)
+
+// RegenerateScratchTwoFactor regenerates the user's 2FA scratch code.
+func RegenerateScratchTwoFactor(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ t, err := auth.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if err != nil {
+ if auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
+			ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+			return
+		}
+ ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
+ return
+ }
+
+ token, err := t.GenerateScratchToken()
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to GenerateScratchToken", err)
+ return
+ }
+
+ if err = auth.UpdateTwoFactor(ctx, t); err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to UpdateTwoFactor", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.twofa_scratch_token_regenerated", token))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+}
+
+// DisableTwoFactor deletes the user's 2FA settings.
+func DisableTwoFactor(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ t, err := auth.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if err != nil {
+ if auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.Flash.Error(ctx.Tr("settings.twofa_not_enrolled"))
+			ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+			return
+		}
+ ctx.ServerError("SettingsTwoFactor: Failed to GetTwoFactorByUID", err)
+ return
+ }
+
+ if err = auth.DeleteTwoFactorByID(ctx, t.ID, ctx.Doer.ID); err != nil {
+ if auth.IsErrTwoFactorNotEnrolled(err) {
+ // There is a potential DB race here - we must have been disabled by another request in the intervening period
+ ctx.Flash.Success(ctx.Tr("settings.twofa_disabled"))
+			ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+			return
+		}
+ ctx.ServerError("SettingsTwoFactor: Failed to DeleteTwoFactorByID", err)
+ return
+ }
+
+ if err := mailer.SendDisabledTOTP(ctx, ctx.Doer); err != nil {
+ ctx.ServerError("SendDisabledTOTP", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.twofa_disabled"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+}
+
+func twofaGenerateSecretAndQr(ctx *context.Context) bool {
+ var otpKey *otp.Key
+ var err error
+ uri := ctx.Session.Get("twofaUri")
+ if uri != nil {
+ otpKey, err = otp.NewKeyFromURL(uri.(string))
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed NewKeyFromURL: ", err)
+ return false
+ }
+ }
+ // Filter unsafe character ':' in issuer
+ issuer := strings.ReplaceAll(setting.AppName+" ("+setting.Domain+")", ":", "")
+ if otpKey == nil {
+ otpKey, err = totp.Generate(totp.GenerateOpts{
+ SecretSize: 40,
+ Issuer: issuer,
+ AccountName: ctx.Doer.Name,
+ })
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: totpGenerate Failed", err)
+ return false
+ }
+ }
+
+ ctx.Data["TwofaSecret"] = otpKey.Secret()
+ img, err := otpKey.Image(320, 240)
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: otpKey image generation failed", err)
+ return false
+ }
+
+ var imgBytes bytes.Buffer
+ if err = png.Encode(&imgBytes, img); err != nil {
+ ctx.ServerError("SettingsTwoFactor: otpKey png encoding failed", err)
+ return false
+ }
+
+ ctx.Data["QrUri"] = template.URL("data:image/png;base64," + base64.StdEncoding.EncodeToString(imgBytes.Bytes()))
+
+ if err := ctx.Session.Set("twofaSecret", otpKey.Secret()); err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to set session for twofaSecret", err)
+ return false
+ }
+
+ if err := ctx.Session.Set("twofaUri", otpKey.String()); err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to set session for twofaUri", err)
+ return false
+ }
+
+ // Here we're just going to try to release the session early
+ if err := ctx.Session.Release(); err != nil {
+ // we'll tolerate errors here as they *should* get saved elsewhere
+ log.Error("Unable to save changes to the session: %v", err)
+ }
+ return true
+}
+
+// EnrollTwoFactor shows the page where the user can enroll into 2FA.
+func EnrollTwoFactor(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ t, err := auth.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if t != nil {
+ // already enrolled - we should redirect back!
+ log.Warn("Trying to re-enroll %-v in twofa when already enrolled", ctx.Doer)
+ ctx.Flash.Error(ctx.Tr("settings.twofa_is_enrolled"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+ return
+ }
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("SettingsTwoFactor: GetTwoFactorByUID", err)
+ return
+ }
+
+ if !twofaGenerateSecretAndQr(ctx) {
+ return
+ }
+
+ ctx.HTML(http.StatusOK, tplSettingsTwofaEnroll)
+}
+
+// EnrollTwoFactorPost handles enrolling the user into 2FA.
+func EnrollTwoFactorPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.TwoFactorAuthForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ t, err := auth.GetTwoFactorByUID(ctx, ctx.Doer.ID)
+ if t != nil {
+ // already enrolled
+ ctx.Flash.Error(ctx.Tr("settings.twofa_is_enrolled"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+ return
+ }
+ if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ ctx.ServerError("SettingsTwoFactor: Failed to check if already enrolled with GetTwoFactorByUID", err)
+ return
+ }
+
+ if ctx.HasError() {
+ if !twofaGenerateSecretAndQr(ctx) {
+ return
+ }
+ ctx.HTML(http.StatusOK, tplSettingsTwofaEnroll)
+ return
+ }
+
+ secretRaw := ctx.Session.Get("twofaSecret")
+ if secretRaw == nil {
+ ctx.Flash.Error(ctx.Tr("settings.twofa_failed_get_secret"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security/two_factor/enroll")
+ return
+ }
+
+ secret := secretRaw.(string)
+ if !totp.Validate(form.Passcode, secret) {
+ if !twofaGenerateSecretAndQr(ctx) {
+ return
+ }
+ ctx.Flash.Error(ctx.Tr("settings.passcode_invalid"))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security/two_factor/enroll")
+ return
+ }
+
+ t = &auth.TwoFactor{
+ UID: ctx.Doer.ID,
+ }
+ err = t.SetSecret(secret)
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to set secret", err)
+ return
+ }
+ token, err := t.GenerateScratchToken()
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor: Failed to generate scratch token", err)
+ return
+ }
+
+ // Now we have to delete the secrets - because if we fail to insert then it's highly likely that they have already been used
+ // If we can detect the unique constraint failure below we can move this to after the NewTwoFactor
+ if err := ctx.Session.Delete("twofaSecret"); err != nil {
+ // tolerate this failure - it's more important to continue
+ log.Error("Unable to delete twofaSecret from the session: Error: %v", err)
+ }
+ if err := ctx.Session.Delete("twofaUri"); err != nil {
+ // tolerate this failure - it's more important to continue
+ log.Error("Unable to delete twofaUri from the session: Error: %v", err)
+ }
+ if err := ctx.Session.Release(); err != nil {
+ // tolerate this failure - it's more important to continue
+ log.Error("Unable to save changes to the session: %v", err)
+ }
+
+ if err := mailer.SendTOTPEnrolled(ctx, ctx.Doer); err != nil {
+ ctx.ServerError("SendTOTPEnrolled", err)
+ return
+ }
+
+ if err = auth.NewTwoFactor(ctx, t); err != nil {
+		// FIXME: We need to handle a unique constraint failure here; it's entirely possible that another request has beaten us.
+		// If there is a unique constraint failure we should just tolerate the error.
+ ctx.ServerError("SettingsTwoFactor: Failed to save two factor", err)
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("settings.twofa_enrolled", token))
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+}
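
The enrollment flow above reduces to two calls into github.com/pquerna/otp: generate a key (whose otpauth:// URL is what the QR code encodes) and validate a passcode against the stored secret. A minimal standalone sketch of that round trip, with placeholder issuer and account names:

package main

import (
	"fmt"
	"log"

	"github.com/pquerna/otp/totp"
)

func main() {
	// Generate a new TOTP key, as twofaGenerateSecretAndQr does for the enroll page.
	key, err := totp.Generate(totp.GenerateOpts{
		Issuer:      "example.org", // placeholder issuer
		AccountName: "alice",       // placeholder account
		SecretSize:  40,
	})
	if err != nil {
		log.Fatal(err)
	}

	// key.String() is the otpauth:// provisioning URI that the QR code encodes.
	fmt.Println("provisioning URI:", key.String())

	// Check a user-supplied passcode against the secret, mirroring the
	// totp.Validate call in EnrollTwoFactorPost.
	fmt.Println("passcode accepted:", totp.Validate("123456", key.Secret()))
}
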
diff --git a/routers/web/user/setting/security/openid.go b/routers/web/user/setting/security/openid.go
new file mode 100644
index 0000000..8f788e1
--- /dev/null
+++ b/routers/web/user/setting/security/openid.go
@@ -0,0 +1,126 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "net/http"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/openid"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+)
+
+// OpenIDPost handles adding an OpenID URI to the user's account
+func OpenIDPost(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.AddOpenIDForm)
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ if ctx.HasError() {
+ loadSecurityData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsSecurity)
+ return
+ }
+
+	// WARNING: specifying a wrong OpenID here could lock
+	// a user out of their account; it would be better to
+	// verify/confirm the new OpenID before storing it
+
+ // Also, consider allowing for multiple OpenID URIs
+
+ id, err := openid.Normalize(form.Openid)
+ if err != nil {
+ loadSecurityData(ctx)
+
+ ctx.RenderWithErr(err.Error(), tplSettingsSecurity, &form)
+ return
+ }
+ form.Openid = id
+ log.Trace("Normalized id: " + id)
+
+ oids, err := user_model.GetUserOpenIDs(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserOpenIDs", err)
+ return
+ }
+ ctx.Data["OpenIDs"] = oids
+
+ // Check that the OpenID is not already used
+ for _, obj := range oids {
+ if obj.URI == id {
+ loadSecurityData(ctx)
+
+ ctx.RenderWithErr(ctx.Tr("form.openid_been_used", id), tplSettingsSecurity, &form)
+ return
+ }
+ }
+
+ redirectTo := setting.AppURL + "user/settings/security"
+ url, err := openid.RedirectURL(id, redirectTo, setting.AppURL)
+ if err != nil {
+ loadSecurityData(ctx)
+
+ ctx.RenderWithErr(err.Error(), tplSettingsSecurity, &form)
+ return
+ }
+ ctx.Redirect(url)
+}
+
+func settingsOpenIDVerify(ctx *context.Context) {
+ log.Trace("Incoming call to: " + ctx.Req.URL.String())
+
+ fullURL := setting.AppURL + ctx.Req.URL.String()[1:]
+ log.Trace("Full URL: " + fullURL)
+
+ id, err := openid.Verify(fullURL)
+ if err != nil {
+ ctx.RenderWithErr(err.Error(), tplSettingsSecurity, &forms.AddOpenIDForm{
+ Openid: id,
+ })
+ return
+ }
+
+ log.Trace("Verified ID: " + id)
+
+ oid := &user_model.UserOpenID{UID: ctx.Doer.ID, URI: id}
+ if err = user_model.AddUserOpenID(ctx, oid); err != nil {
+ if user_model.IsErrOpenIDAlreadyUsed(err) {
+ ctx.RenderWithErr(ctx.Tr("form.openid_been_used", id), tplSettingsSecurity, &forms.AddOpenIDForm{Openid: id})
+ return
+ }
+ ctx.ServerError("AddUserOpenID", err)
+ return
+ }
+ log.Trace("Associated OpenID %s to user %s", id, ctx.Doer.Name)
+ ctx.Flash.Success(ctx.Tr("settings.add_openid_success"))
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+}
+
+// DeleteOpenID handles deleting one of the user's OpenID URIs
+func DeleteOpenID(ctx *context.Context) {
+ if err := user_model.DeleteUserOpenID(ctx, &user_model.UserOpenID{ID: ctx.FormInt64("id"), UID: ctx.Doer.ID}); err != nil {
+ ctx.ServerError("DeleteUserOpenID", err)
+ return
+ }
+ log.Trace("OpenID address deleted: %s", ctx.Doer.Name)
+
+ ctx.Flash.Success(ctx.Tr("settings.openid_deletion_success"))
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/security")
+}
+
+// ToggleOpenIDVisibility toggles the visibility of one of the user's OpenID URIs
+func ToggleOpenIDVisibility(ctx *context.Context) {
+ if err := user_model.ToggleUserOpenIDVisibility(ctx, ctx.FormInt64("id")); err != nil {
+ ctx.ServerError("ToggleUserOpenIDVisibility", err)
+ return
+ }
+
+ ctx.Redirect(setting.AppSubURL + "/user/settings/security")
+}
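
For orientation, the add-OpenID flow leans on three helpers from code.gitea.io/gitea/modules/auth/openid: Normalize on the submitted identifier, RedirectURL to send the browser to the provider, and Verify on the callback URL (handled in settingsOpenIDVerify). A rough sketch of the outbound leg only, with placeholder URLs; RedirectURL performs discovery, so it needs the identifier to be reachable:

package main

import (
	"fmt"
	"log"

	"code.gitea.io/gitea/modules/auth/openid"
)

func main() {
	// Normalize the user-supplied identifier, as OpenIDPost does before any lookup.
	id, err := openid.Normalize("https://openid.example.org/alice") // placeholder identifier
	if err != nil {
		log.Fatal(err)
	}

	// Build the URL that starts the verification round trip. The provider later
	// redirects back with "openid.return_to" set, which Security() hands off to
	// settingsOpenIDVerify.
	redirect, err := openid.RedirectURL(id, "https://forge.example.org/user/settings/security", "https://forge.example.org/")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("send the browser to:", redirect)
}
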
diff --git a/routers/web/user/setting/security/security.go b/routers/web/user/setting/security/security.go
new file mode 100644
index 0000000..8d6859a
--- /dev/null
+++ b/routers/web/user/setting/security/security.go
@@ -0,0 +1,148 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "net/http"
+ "sort"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+ "code.gitea.io/gitea/services/context"
+)
+
+const (
+ tplSettingsSecurity base.TplName = "user/settings/security/security"
+ tplSettingsTwofaEnroll base.TplName = "user/settings/security/twofa_enroll"
+)
+
+// Security renders the user's security settings page (2FA, WebAuthn, linked accounts, OpenID)
+func Security(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings.security")
+ ctx.Data["PageIsSettingsSecurity"] = true
+
+ if ctx.FormString("openid.return_to") != "" {
+ settingsOpenIDVerify(ctx)
+ return
+ }
+
+ loadSecurityData(ctx)
+
+ ctx.HTML(http.StatusOK, tplSettingsSecurity)
+}
+
+// DeleteAccountLink deletes a single account link
+func DeleteAccountLink(ctx *context.Context) {
+ id := ctx.FormInt64("id")
+ if id <= 0 {
+ ctx.Flash.Error("Account link id is not given")
+ } else {
+ if _, err := user_model.RemoveAccountLink(ctx, ctx.Doer, id); err != nil {
+ ctx.Flash.Error("RemoveAccountLink: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("settings.remove_account_link_success"))
+ }
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/security")
+}
+
+func loadSecurityData(ctx *context.Context) {
+ enrolled, err := auth_model.HasTwoFactorByUID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("SettingsTwoFactor", err)
+ return
+ }
+ ctx.Data["TOTPEnrolled"] = enrolled
+
+ credentials, err := auth_model.GetWebAuthnCredentialsByUID(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetWebAuthnCredentialsByUID", err)
+ return
+ }
+ ctx.Data["WebAuthnCredentials"] = credentials
+
+ tokens, err := db.Find[auth_model.AccessToken](ctx, auth_model.ListAccessTokensOptions{UserID: ctx.Doer.ID})
+ if err != nil {
+ ctx.ServerError("ListAccessTokens", err)
+ return
+ }
+ ctx.Data["Tokens"] = tokens
+
+ accountLinks, err := db.Find[user_model.ExternalLoginUser](ctx, user_model.FindExternalUserOptions{
+ UserID: ctx.Doer.ID,
+ OrderBy: "login_source_id DESC",
+ })
+ if err != nil {
+ ctx.ServerError("ListAccountLinks", err)
+ return
+ }
+
+ // map the provider display name with the AuthSource
+ sources := make(map[*auth_model.Source]string)
+ for _, externalAccount := range accountLinks {
+ if authSource, err := auth_model.GetSourceByID(ctx, externalAccount.LoginSourceID); err == nil {
+ var providerDisplayName string
+
+ type DisplayNamed interface {
+ DisplayName() string
+ }
+
+ type Named interface {
+ Name() string
+ }
+
+ if displayNamed, ok := authSource.Cfg.(DisplayNamed); ok {
+ providerDisplayName = displayNamed.DisplayName()
+ } else if named, ok := authSource.Cfg.(Named); ok {
+ providerDisplayName = named.Name()
+ } else {
+ providerDisplayName = authSource.Name
+ }
+ sources[authSource] = providerDisplayName
+ }
+ }
+ ctx.Data["AccountLinks"] = sources
+
+ authSources, err := db.Find[auth_model.Source](ctx, auth_model.FindSourcesOptions{
+ IsActive: optional.None[bool](),
+ LoginType: auth_model.OAuth2,
+ })
+ if err != nil {
+ ctx.ServerError("FindSources", err)
+ return
+ }
+
+ var orderedOAuth2Names []string
+ oauth2Providers := make(map[string]oauth2.Provider)
+ for _, source := range authSources {
+ provider, err := oauth2.CreateProviderFromSource(source)
+ if err != nil {
+ ctx.ServerError("CreateProviderFromSource", err)
+ return
+ }
+ oauth2Providers[source.Name] = provider
+ if source.IsActive {
+ orderedOAuth2Names = append(orderedOAuth2Names, source.Name)
+ }
+ }
+
+ sort.Strings(orderedOAuth2Names)
+
+ ctx.Data["OrderedOAuth2Names"] = orderedOAuth2Names
+ ctx.Data["OAuth2Providers"] = oauth2Providers
+
+ openid, err := user_model.GetUserOpenIDs(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.ServerError("GetUserOpenIDs", err)
+ return
+ }
+ ctx.Data["OpenIDs"] = openid
+}
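
The provider-name lookup in loadSecurityData is an interface-probing pattern: prefer a DisplayName() method, fall back to Name(), and finally to the source's stored name. A self-contained illustration; oauthCfg, ldapCfg, and plainCfg are invented stand-ins for the auth source Cfg values, not real types:

package main

import "fmt"

// Hypothetical config types standing in for the auth source Cfg values above.
type oauthCfg struct{ provider string }

func (c oauthCfg) DisplayName() string { return c.provider + " (OAuth2)" }

type ldapCfg struct{ name string }

func (c ldapCfg) Name() string { return c.name }

type plainCfg struct{}

// displayName mirrors the fallback chain in loadSecurityData: prefer
// DisplayName(), then Name(), then a caller-supplied default.
func displayName(cfg any, fallback string) string {
	type displayNamed interface{ DisplayName() string }
	type named interface{ Name() string }

	if d, ok := cfg.(displayNamed); ok {
		return d.DisplayName()
	}
	if n, ok := cfg.(named); ok {
		return n.Name()
	}
	return fallback
}

func main() {
	fmt.Println(displayName(oauthCfg{provider: "GitLab"}, "source-1"))
	fmt.Println(displayName(ldapCfg{name: "Corporate LDAP"}, "source-2"))
	fmt.Println(displayName(plainCfg{}, "source-3"))
}
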
diff --git a/routers/web/user/setting/security/webauthn.go b/routers/web/user/setting/security/webauthn.go
new file mode 100644
index 0000000..bfbc06c
--- /dev/null
+++ b/routers/web/user/setting/security/webauthn.go
@@ -0,0 +1,137 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package security
+
+import (
+ "errors"
+ "net/http"
+ "strconv"
+ "time"
+
+ "code.gitea.io/gitea/models/auth"
+ wa "code.gitea.io/gitea/modules/auth/webauthn"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/mailer"
+
+ "github.com/go-webauthn/webauthn/protocol"
+ "github.com/go-webauthn/webauthn/webauthn"
+)
+
+// WebAuthnRegister initializes the webauthn registration procedure
+func WebAuthnRegister(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.WebauthnRegistrationForm)
+ if form.Name == "" {
+		// Default the name to the hexadecimal representation of the current time
+ form.Name = strconv.FormatInt(time.Now().UnixNano(), 16)
+ }
+
+ cred, err := auth.GetWebAuthnCredentialByName(ctx, ctx.Doer.ID, form.Name)
+ if err != nil && !auth.IsErrWebAuthnCredentialNotExist(err) {
+ ctx.ServerError("GetWebAuthnCredentialsByUID", err)
+ return
+ }
+ if cred != nil {
+ ctx.Error(http.StatusConflict, "Name already taken")
+ return
+ }
+
+ _ = ctx.Session.Delete("webauthnRegistration")
+ if err := ctx.Session.Set("webauthnName", form.Name); err != nil {
+ ctx.ServerError("Unable to set session key for webauthnName", err)
+ return
+ }
+
+ credentialOptions, sessionData, err := wa.WebAuthn.BeginRegistration((*wa.User)(ctx.Doer))
+ if err != nil {
+ ctx.ServerError("Unable to BeginRegistration", err)
+ return
+ }
+
+ // Save the session data as marshaled JSON
+ if err = ctx.Session.Set("webauthnRegistration", sessionData); err != nil {
+ ctx.ServerError("Unable to set session", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, credentialOptions)
+}
+
+// WebauthnRegisterPost receives the response of the security key
+func WebauthnRegisterPost(ctx *context.Context) {
+ name, ok := ctx.Session.Get("webauthnName").(string)
+ if !ok || name == "" {
+ ctx.ServerError("Get webauthnName", errors.New("no webauthnName"))
+ return
+ }
+
+ // Load the session data
+ sessionData, ok := ctx.Session.Get("webauthnRegistration").(*webauthn.SessionData)
+ if !ok || sessionData == nil {
+ ctx.ServerError("Get registration", errors.New("no registration"))
+ return
+ }
+ defer func() {
+ _ = ctx.Session.Delete("webauthnRegistration")
+ }()
+
+ // Verify that the challenge succeeded
+ cred, err := wa.WebAuthn.FinishRegistration((*wa.User)(ctx.Doer), *sessionData, ctx.Req)
+ if err != nil {
+ if pErr, ok := err.(*protocol.Error); ok {
+ log.Error("Unable to finish registration due to error: %v\nDevInfo: %s", pErr, pErr.DevInfo)
+ }
+ ctx.ServerError("CreateCredential", err)
+ return
+ }
+
+ dbCred, err := auth.GetWebAuthnCredentialByName(ctx, ctx.Doer.ID, name)
+ if err != nil && !auth.IsErrWebAuthnCredentialNotExist(err) {
+ ctx.ServerError("GetWebAuthnCredentialsByUID", err)
+ return
+ }
+ if dbCred != nil {
+ ctx.Error(http.StatusConflict, "Name already taken")
+ return
+ }
+
+ // Create the credential
+ _, err = auth.CreateCredential(ctx, ctx.Doer.ID, name, cred)
+ if err != nil {
+ ctx.ServerError("CreateCredential", err)
+ return
+ }
+ _ = ctx.Session.Delete("webauthnName")
+
+ ctx.JSON(http.StatusCreated, cred)
+}
+
+// WebauthnDelete deletes a security key by ID
+func WebauthnDelete(ctx *context.Context) {
+ form := web.GetForm(ctx).(*forms.WebauthnDeleteForm)
+ cred, err := auth.GetWebAuthnCredentialByID(ctx, form.ID)
+ if err != nil || cred.UserID != ctx.Doer.ID {
+ if err != nil && !auth.IsErrWebAuthnCredentialNotExist(err) {
+ log.Error("GetWebAuthnCredentialByID: %v", err)
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/security")
+ return
+ }
+
+ if _, err := auth.DeleteCredential(ctx, form.ID, ctx.Doer.ID); err != nil {
+ ctx.ServerError("GetWebAuthnCredentialByID", err)
+ return
+ }
+
+ if err := mailer.SendRemovedSecurityKey(ctx, ctx.Doer, cred.Name); err != nil {
+ ctx.ServerError("SendRemovedSecurityKey", err)
+ return
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/security")
+}
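
WebAuthnRegister and WebauthnRegisterPost hand state between requests through the session, which stores values as any; the read side therefore type-asserts and consumes the value exactly once. A stripped-down sketch of that hand-off, using a plain map as a stand-in for ctx.Session:

package main

import (
	"errors"
	"fmt"
)

// fakeSession stands in for ctx.Session: values round-trip as any, which is
// why WebauthnRegisterPost has to type-assert what it reads back.
type fakeSession map[string]any

type sessionData struct{ Challenge string }

// finishRegistration consumes the pending registration state exactly once,
// mirroring the type assertion and deferred delete in WebauthnRegisterPost.
func finishRegistration(s fakeSession) (string, error) {
	data, ok := s["webauthnRegistration"].(*sessionData)
	if !ok || data == nil {
		return "", errors.New("no registration in progress")
	}
	defer delete(s, "webauthnRegistration") // drop the one-shot state either way
	return data.Challenge, nil
}

func main() {
	s := fakeSession{"webauthnRegistration": &sessionData{Challenge: "abc123"}}

	challenge, err := finishRegistration(s)
	fmt.Println(challenge, err) // abc123 <nil>

	_, err = finishRegistration(s)
	fmt.Println(err) // second call fails: the state was already consumed
}
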
diff --git a/routers/web/user/setting/webhooks.go b/routers/web/user/setting/webhooks.go
new file mode 100644
index 0000000..3cc67d9
--- /dev/null
+++ b/routers/web/user/setting/webhooks.go
@@ -0,0 +1,49 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/webhook"
+ "code.gitea.io/gitea/modules/base"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+ webhook_service "code.gitea.io/gitea/services/webhook"
+)
+
+const (
+ tplSettingsHooks base.TplName = "user/settings/hooks"
+)
+
+// Webhooks renders the webhook list page
+func Webhooks(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("settings")
+ ctx.Data["PageIsSettingsHooks"] = true
+ ctx.Data["BaseLink"] = setting.AppSubURL + "/user/settings/hooks"
+ ctx.Data["BaseLinkNew"] = setting.AppSubURL + "/user/settings/hooks"
+ ctx.Data["WebhookList"] = webhook_service.List()
+ ctx.Data["Description"] = ctx.Tr("settings.hooks.desc")
+
+ ws, err := db.Find[webhook.Webhook](ctx, webhook.ListWebhookOptions{OwnerID: ctx.Doer.ID})
+ if err != nil {
+ ctx.ServerError("ListWebhooksByOpts", err)
+ return
+ }
+
+ ctx.Data["Webhooks"] = ws
+ ctx.HTML(http.StatusOK, tplSettingsHooks)
+}
+
+// DeleteWebhook handles deleting a webhook
+func DeleteWebhook(ctx *context.Context) {
+ if err := webhook.DeleteWebhookByOwnerID(ctx, ctx.Doer.ID, ctx.FormInt64("id")); err != nil {
+ ctx.Flash.Error("DeleteWebhookByOwnerID: " + err.Error())
+ } else {
+ ctx.Flash.Success(ctx.Tr("repo.settings.webhook_deletion_success"))
+ }
+
+ ctx.JSONRedirect(setting.AppSubURL + "/user/settings/hooks")
+}
diff --git a/routers/web/user/stop_watch.go b/routers/web/user/stop_watch.go
new file mode 100644
index 0000000..38f74ea
--- /dev/null
+++ b/routers/web/user/stop_watch.go
@@ -0,0 +1,40 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+
+ "code.gitea.io/gitea/models/db"
+ issues_model "code.gitea.io/gitea/models/issues"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/convert"
+)
+
+// GetStopwatches returns all of the user's stopwatches
+func GetStopwatches(ctx *context.Context) {
+ sws, err := issues_model.GetUserStopwatches(ctx, ctx.Doer.ID, db.ListOptions{
+ Page: ctx.FormInt("page"),
+ PageSize: convert.ToCorrectPageSize(ctx.FormInt("limit")),
+ })
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ count, err := issues_model.CountUserStopwatches(ctx, ctx.Doer.ID)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ apiSWs, err := convert.ToStopWatches(ctx, sws)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, err.Error())
+ return
+ }
+
+ ctx.SetTotalCountHeader(count)
+ ctx.JSON(http.StatusOK, apiSWs)
+}
diff --git a/routers/web/user/task.go b/routers/web/user/task.go
new file mode 100644
index 0000000..8476767
--- /dev/null
+++ b/routers/web/user/task.go
@@ -0,0 +1,53 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+ "net/http"
+ "strconv"
+
+ admin_model "code.gitea.io/gitea/models/admin"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/services/context"
+)
+
+// TaskStatus returns the status of the given task
+func TaskStatus(ctx *context.Context) {
+ task, opts, err := admin_model.GetMigratingTaskByID(ctx, ctx.ParamsInt64("task"), ctx.Doer.ID)
+ if err != nil {
+ if admin_model.IsErrTaskDoesNotExist(err) {
+ ctx.JSON(http.StatusNotFound, map[string]any{
+ "error": "task `" + strconv.FormatInt(ctx.ParamsInt64("task"), 10) + "` does not exist",
+ })
+ return
+ }
+ ctx.JSON(http.StatusInternalServerError, map[string]any{
+ "err": err,
+ })
+ return
+ }
+
+ message := task.Message
+
+ if task.Message != "" && task.Message[0] == '{' {
+ // assume message is actually a translatable string
+ var translatableMessage admin_model.TranslatableMessage
+ if err := json.Unmarshal([]byte(message), &translatableMessage); err != nil {
+ translatableMessage = admin_model.TranslatableMessage{
+ Format: "migrate.migrating_failed.error",
+ Args: []any{task.Message},
+ }
+ }
+ message = ctx.Locale.TrString(translatableMessage.Format, translatableMessage.Args...)
+ }
+
+ ctx.JSON(http.StatusOK, map[string]any{
+ "status": task.Status,
+ "message": message,
+ "repo-id": task.RepoID,
+ "repo-name": opts.RepoName,
+ "start": task.StartTime,
+ "end": task.EndTime,
+ })
+}
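
TaskStatus treats any message that starts with '{' as a JSON-encoded translatable message and falls back to the raw text when decoding fails. A standalone sketch of that decode-with-fallback step; the struct and its JSON tags are assumptions, not the actual admin_model.TranslatableMessage definition:

package main

import (
	"encoding/json"
	"fmt"
)

// translatableMessage mirrors the shape TaskStatus expects when a task message
// starts with '{'; the field names and JSON tags here are assumptions.
type translatableMessage struct {
	Format string `json:"format"`
	Args   []any  `json:"args"`
}

// decodeMessage applies the same rule as TaskStatus: treat a leading '{' as
// JSON, and fall back to wrapping the raw text if decoding fails.
func decodeMessage(raw string) translatableMessage {
	if raw == "" || raw[0] != '{' {
		return translatableMessage{Format: "%s", Args: []any{raw}}
	}
	var m translatableMessage
	if err := json.Unmarshal([]byte(raw), &m); err != nil {
		return translatableMessage{Format: "%s", Args: []any{raw}}
	}
	return m
}

func main() {
	fmt.Println(decodeMessage(`{"format":"migrate.migrating_failed.error","args":["timeout"]}`))
	fmt.Println(decodeMessage("plain progress text"))
}
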
diff --git a/routers/web/web.go b/routers/web/web.go
new file mode 100644
index 0000000..c268f72
--- /dev/null
+++ b/routers/web/web.go
@@ -0,0 +1,1658 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ gocontext "context"
+ "net/http"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/perm"
+ quota_model "code.gitea.io/gitea/models/quota"
+ "code.gitea.io/gitea/models/unit"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/metrics"
+ "code.gitea.io/gitea/modules/public"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/validation"
+ "code.gitea.io/gitea/modules/web"
+ "code.gitea.io/gitea/modules/web/middleware"
+ "code.gitea.io/gitea/modules/web/routing"
+ "code.gitea.io/gitea/routers/common"
+ "code.gitea.io/gitea/routers/web/admin"
+ "code.gitea.io/gitea/routers/web/auth"
+ "code.gitea.io/gitea/routers/web/devtest"
+ "code.gitea.io/gitea/routers/web/events"
+ "code.gitea.io/gitea/routers/web/explore"
+ "code.gitea.io/gitea/routers/web/feed"
+ "code.gitea.io/gitea/routers/web/healthcheck"
+ "code.gitea.io/gitea/routers/web/misc"
+ "code.gitea.io/gitea/routers/web/org"
+ org_setting "code.gitea.io/gitea/routers/web/org/setting"
+ "code.gitea.io/gitea/routers/web/repo"
+ "code.gitea.io/gitea/routers/web/repo/actions"
+ "code.gitea.io/gitea/routers/web/repo/badges"
+ repo_flags "code.gitea.io/gitea/routers/web/repo/flags"
+ repo_setting "code.gitea.io/gitea/routers/web/repo/setting"
+ "code.gitea.io/gitea/routers/web/shared/project"
+ "code.gitea.io/gitea/routers/web/user"
+ user_setting "code.gitea.io/gitea/routers/web/user/setting"
+ "code.gitea.io/gitea/routers/web/user/setting/security"
+ auth_service "code.gitea.io/gitea/services/auth"
+ "code.gitea.io/gitea/services/context"
+ "code.gitea.io/gitea/services/forms"
+ "code.gitea.io/gitea/services/lfs"
+
+	_ "code.gitea.io/gitea/modules/session" // to register all internal adapters
+
+ "code.forgejo.org/go-chi/captcha"
+ chi_middleware "github.com/go-chi/chi/v5/middleware"
+ "github.com/go-chi/cors"
+ "github.com/klauspost/compress/gzhttp"
+ "github.com/prometheus/client_golang/prometheus"
+)
+
+var GzipMinSize = gzhttp.DefaultMinSize
+
+// optionsCorsHandler returns an http handler that sets CORS headers if enabled by config; it blocks non-CORS OPTIONS requests.
+func optionsCorsHandler() func(next http.Handler) http.Handler {
+ var corsHandler func(next http.Handler) http.Handler
+ if setting.CORSConfig.Enabled {
+ corsHandler = cors.Handler(cors.Options{
+ AllowedOrigins: setting.CORSConfig.AllowDomain,
+ AllowedMethods: setting.CORSConfig.Methods,
+ AllowCredentials: setting.CORSConfig.AllowCredentials,
+ AllowedHeaders: setting.CORSConfig.Headers,
+ MaxAge: int(setting.CORSConfig.MaxAge.Seconds()),
+ })
+ }
+
+ return func(next http.Handler) http.Handler {
+ return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ if r.Method == http.MethodOptions {
+ if corsHandler != nil && r.Header.Get("Access-Control-Request-Method") != "" {
+ corsHandler(next).ServeHTTP(w, r)
+ } else {
+					// explicitly deny OPTIONS requests when the CORS handler is not executed, to avoid the next GET/POST handler being incorrectly called by the OPTIONS request
+ w.WriteHeader(http.StatusMethodNotAllowed)
+ }
+ return
+ }
+ // for non-OPTIONS requests, call the CORS handler to add some related headers like "Vary"
+ if corsHandler != nil {
+ corsHandler(next).ServeHTTP(w, r)
+ } else {
+ next.ServeHTTP(w, r)
+ }
+ })
+ }
+}
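
optionsCorsHandler is a plain func(next http.Handler) http.Handler middleware; even with CORS disabled it swallows bare OPTIONS requests so they never reach the GET/POST handler registered on the same path. A minimal standalone version of that behaviour (the /ping route and listen address are placeholders):

package main

import (
	"fmt"
	"log"
	"net/http"
)

// denyBareOptions sketches the shape of optionsCorsHandler when CORS is
// disabled: OPTIONS requests get a 405 instead of falling through to the
// GET/POST handler registered on the same path.
func denyBareOptions(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method == http.MethodOptions {
			w.WriteHeader(http.StatusMethodNotAllowed)
			return
		}
		next.ServeHTTP(w, r)
	})
}

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/ping", func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintln(w, "pong")
	})
	// Wrap the whole mux, just as the router applies its middleware before the routes.
	log.Fatal(http.ListenAndServe("127.0.0.1:8080", denyBareOptions(mux))) // placeholder address
}
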
+
+// The OAuth2 plugin is expected to be executed first, as it must ignore the user id stored
+// in the session (if there is a user id stored in the session, other plugins might return the
+// user object for that id).
+//
+// The Session plugin is expected to be executed second, in order to skip authentication
+// for users that have already signed in.
+func buildAuthGroup() *auth_service.Group {
+ group := auth_service.NewGroup()
+ group.Add(&auth_service.OAuth2{}) // FIXME: this should be removed and only applied in download and oauth related routers
+ group.Add(&auth_service.Basic{}) // FIXME: this should be removed and only applied in download and git/lfs routers
+
+ if setting.Service.EnableReverseProxyAuth {
+		group.Add(&auth_service.ReverseProxy{}) // ReverseProxy must come before Session, otherwise the header is ignored if the user is already signed in
+ }
+ group.Add(&auth_service.Session{})
+
+ if setting.IsWindows && auth_model.IsSSPIEnabled(db.DefaultContext) {
+ group.Add(&auth_service.SSPI{}) // it MUST be the last, see the comment of SSPI
+ }
+
+ return group
+}
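
buildAuthGroup depends on the auth methods being tried in registration order, which is why the comment above pins OAuth2 first, Session after it, and SSPI last. The sketch below only illustrates that first-match idea; authMethod, basicAuth, sessionAuth, and group are invented stand-ins, not the real auth_service types:

package main

import "fmt"

// authMethod is an invented stand-in for the auth_service.Method idea: each
// method either identifies a user from the request or defers to the next one.
type authMethod interface {
	Verify(token string) (user string, ok bool)
}

type basicAuth struct{}

func (basicAuth) Verify(token string) (string, bool) {
	if token == "basic:alice" {
		return "alice", true
	}
	return "", false
}

type sessionAuth struct{}

func (sessionAuth) Verify(token string) (string, bool) {
	if token == "session:bob" {
		return "bob", true
	}
	return "", false
}

// group tries each method in registration order; the first match wins, which
// is why the ordering above matters.
type group []authMethod

func (g group) Verify(token string) (string, bool) {
	for _, m := range g {
		if user, ok := m.Verify(token); ok {
			return user, true
		}
	}
	return "", false
}

func main() {
	g := group{basicAuth{}, sessionAuth{}}
	fmt.Println(g.Verify("session:bob"))
	fmt.Println(g.Verify("unknown"))
}
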
+
+func webAuth(authMethod auth_service.Method) func(*context.Context) {
+ return func(ctx *context.Context) {
+ ar, err := common.AuthShared(ctx.Base, ctx.Session, authMethod)
+ if err != nil {
+ log.Error("Failed to verify user: %v", err)
+ ctx.Error(http.StatusUnauthorized, ctx.Locale.TrString("auth.unauthorized_credentials", "https://codeberg.org/forgejo/forgejo/issues/2809"))
+ return
+ }
+ ctx.Doer = ar.Doer
+ ctx.IsSigned = ar.Doer != nil
+ ctx.IsBasicAuth = ar.IsBasicAuth
+ if ctx.Doer == nil {
+ // ensure the session uid is deleted
+ _ = ctx.Session.Delete("uid")
+ }
+
+ ctx.Csrf.PrepareForSessionUser(ctx)
+ }
+}
+
+// verifyAuthWithOptions checks authentication according to options
+func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+		// Check for users who are prohibited from logging in.
+ if ctx.IsSigned {
+ if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm {
+ ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
+ ctx.HTML(http.StatusOK, "user/auth/activate")
+ return
+ }
+ if !ctx.Doer.IsActive || ctx.Doer.ProhibitLogin {
+ log.Info("Failed authentication attempt for %s from %s", ctx.Doer.Name, ctx.RemoteAddr())
+ ctx.Data["Title"] = ctx.Tr("auth.prohibit_login")
+ ctx.HTML(http.StatusOK, "user/auth/prohibit_login")
+ return
+ }
+
+ if ctx.Doer.MustChangePassword {
+ if ctx.Req.URL.Path != "/user/settings/change_password" {
+ if strings.HasPrefix(ctx.Req.UserAgent(), "git") {
+ ctx.Error(http.StatusUnauthorized, ctx.Locale.TrString("auth.must_change_password"))
+ return
+ }
+ ctx.Data["Title"] = ctx.Tr("auth.must_change_password")
+ ctx.Data["ChangePasscodeLink"] = setting.AppSubURL + "/user/change_password"
+ if ctx.Req.URL.Path != "/user/events" {
+ middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/settings/change_password")
+ return
+ }
+ } else if ctx.Req.URL.Path == "/user/settings/change_password" {
+ // make sure that the form cannot be accessed by users who don't need this
+ ctx.Redirect(setting.AppSubURL + "/")
+ return
+ }
+ }
+
+		// Redirect signed-in users to the dashboard (or the redirect_to target) if they try to visit a page that requires being signed out.
+ if options.SignOutRequired && ctx.IsSigned && ctx.Req.URL.RequestURI() != "/" {
+ ctx.RedirectToFirst(ctx.FormString("redirect_to"))
+ return
+ }
+
+ if !options.SignOutRequired && !options.DisableCSRF && ctx.Req.Method == "POST" {
+ ctx.Csrf.Validate(ctx)
+ if ctx.Written() {
+ return
+ }
+ }
+
+ if options.SignInRequired {
+ if !ctx.IsSigned {
+ if ctx.Req.URL.Path != "/user/events" {
+ middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/login")
+ return
+ } else if !ctx.Doer.IsActive && setting.Service.RegisterEmailConfirm {
+ ctx.Data["Title"] = ctx.Tr("auth.active_your_account")
+ ctx.HTML(http.StatusOK, "user/auth/activate")
+ return
+ }
+ }
+
+		// Redirect to the login page if auto-signin info is provided and the user has not signed in yet.
+ if !options.SignOutRequired && !ctx.IsSigned &&
+ ctx.GetSiteCookie(setting.CookieRememberName) != "" {
+ if ctx.Req.URL.Path != "/user/events" {
+ middleware.SetRedirectToCookie(ctx.Resp, setting.AppSubURL+ctx.Req.URL.RequestURI())
+ }
+ ctx.Redirect(setting.AppSubURL + "/user/login")
+ return
+ }
+
+ if options.AdminRequired {
+ if !ctx.Doer.IsAdmin {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ ctx.Data["PageIsAdmin"] = true
+ }
+ }
+}
+
+func ctxDataSet(args ...any) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ for i := 0; i < len(args); i += 2 {
+ ctx.Data[args[i].(string)] = args[i+1]
+ }
+ }
+}
+
+// Routes returns all web routes
+func Routes() *web.Route {
+ routes := web.NewRoute()
+
+ routes.Head("/", misc.DummyOK) // for health check - doesn't need to be passed through gzip handler
+ routes.Methods("GET, HEAD, OPTIONS", "/assets/*", optionsCorsHandler(), public.FileHandlerFunc())
+ routes.Methods("GET, HEAD", "/avatars/*", storageHandler(setting.Avatar.Storage, "avatars", storage.Avatars))
+ routes.Methods("GET, HEAD", "/repo-avatars/*", storageHandler(setting.RepoAvatar.Storage, "repo-avatars", storage.RepoAvatars))
+ routes.Methods("GET, HEAD", "/apple-touch-icon.png", misc.StaticRedirect("/assets/img/apple-touch-icon.png"))
+ routes.Methods("GET, HEAD", "/apple-touch-icon-precomposed.png", misc.StaticRedirect("/assets/img/apple-touch-icon.png"))
+ routes.Methods("GET, HEAD", "/favicon.ico", misc.StaticRedirect("/assets/img/favicon.png"))
+
+ _ = templates.HTMLRenderer()
+
+ var mid []any
+
+ if setting.EnableGzip {
+ wrapper, err := gzhttp.NewWrapper(gzhttp.RandomJitter(32, 0, false), gzhttp.MinSize(GzipMinSize))
+ if err != nil {
+ log.Fatal("gzhttp.NewWrapper failed: %v", err)
+ }
+ mid = append(mid, wrapper)
+ }
+
+ if setting.Service.EnableCaptcha {
+ // The captcha http.Handler should only fire on /captcha/* so we can just mount this on that url
+ routes.Methods("GET,HEAD", "/captcha/*", append(mid, captcha.Server(captcha.StdWidth, captcha.StdHeight).ServeHTTP)...)
+ }
+
+ if setting.Metrics.Enabled {
+ prometheus.MustRegister(metrics.NewCollector())
+ routes.Get("/metrics", append(mid, Metrics)...)
+ }
+
+ routes.Methods("GET,HEAD", "/robots.txt", append(mid, misc.RobotsTxt)...)
+ routes.Get("/ssh_info", misc.SSHInfo)
+ routes.Get("/api/healthz", healthcheck.Check)
+
+ mid = append(mid, common.Sessioner(), context.Contexter())
+
+ // Get user from session if logged in.
+ mid = append(mid, webAuth(buildAuthGroup()))
+
+	// GetHead routes an undefined HEAD request to the GET handler defined for that route
+ mid = append(mid, chi_middleware.GetHead)
+
+ if setting.API.EnableSwagger {
+		// Note: The route is here and not in the API routes because it renders a web page
+ routes.Get("/api/swagger", append(mid, misc.Swagger)...) // Render V1 by default
+ routes.Get("/api/forgejo/swagger", append(mid, misc.SwaggerForgejo)...)
+ }
+
+ // TODO: These really seem like things that could be folded into Contexter or as helper functions
+ mid = append(mid, user.GetNotificationCount)
+ mid = append(mid, repo.GetActiveStopwatch)
+ mid = append(mid, goGet)
+
+ others := web.NewRoute()
+ others.Use(mid...)
+ registerRoutes(others)
+ routes.Mount("", others)
+ return routes
+}
+
+var ignSignInAndCsrf = verifyAuthWithOptions(&common.VerifyOptions{DisableCSRF: true})
+
+// registerRoutes registers the web routes
+func registerRoutes(m *web.Route) {
+ reqSignIn := verifyAuthWithOptions(&common.VerifyOptions{SignInRequired: true})
+ reqSignOut := verifyAuthWithOptions(&common.VerifyOptions{SignOutRequired: true})
+	// TODO: rename them to "optSignIn", which means that the "sign-in" could be optional, depending on the VerifyOptions (RequireSignInView)
+ ignSignIn := verifyAuthWithOptions(&common.VerifyOptions{SignInRequired: setting.Service.RequireSignInView})
+ ignExploreSignIn := verifyAuthWithOptions(&common.VerifyOptions{SignInRequired: setting.Service.RequireSignInView || setting.Service.Explore.RequireSigninView})
+
+ validation.AddBindingRules()
+
+ linkAccountEnabled := func(ctx *context.Context) {
+ if !setting.Service.EnableOpenIDSignIn && !setting.Service.EnableOpenIDSignUp && !setting.OAuth2.Enabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ openIDSignInEnabled := func(ctx *context.Context) {
+ if !setting.Service.EnableOpenIDSignIn {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ openIDSignUpEnabled := func(ctx *context.Context) {
+ if !setting.Service.EnableOpenIDSignUp {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ reqMilestonesDashboardPageEnabled := func(ctx *context.Context) {
+ if !setting.Service.ShowMilestonesDashboardPage {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ // webhooksEnabled requires webhooks to be enabled by admin.
+ webhooksEnabled := func(ctx *context.Context) {
+ if setting.DisableWebhooks {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ lfsServerEnabled := func(ctx *context.Context) {
+ if !setting.LFS.StartServer {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ federationEnabled := func(ctx *context.Context) {
+ if !setting.Federation.Enabled {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ dlSourceEnabled := func(ctx *context.Context) {
+ if setting.Repository.DisableDownloadSourceArchives {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ sitemapEnabled := func(ctx *context.Context) {
+ if !setting.Other.EnableSitemap {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ packagesEnabled := func(ctx *context.Context) {
+ if !setting.Packages.Enabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ }
+
+ feedEnabled := func(ctx *context.Context) {
+ if !setting.Other.EnableFeed {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+ }
+
+ reqUnitAccess := func(unitType unit.Type, accessMode perm.AccessMode, ignoreGlobal bool) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ // only check global disabled units when ignoreGlobal is false
+ if !ignoreGlobal && unitType.UnitGlobalDisabled() {
+ ctx.NotFound(unitType.String(), nil)
+ return
+ }
+
+ if ctx.ContextUser == nil {
+ ctx.NotFound(unitType.String(), nil)
+ return
+ }
+
+ if ctx.ContextUser.IsOrganization() {
+ if ctx.Org.Organization.UnitPermission(ctx, ctx.Doer, unitType) < accessMode {
+ ctx.NotFound(unitType.String(), nil)
+ return
+ }
+ }
+ }
+ }
+
+ addSettingsVariablesRoutes := func() {
+ m.Group("/variables", func() {
+ m.Get("", repo_setting.Variables)
+ m.Post("/new", web.Bind(forms.EditVariableForm{}), repo_setting.VariableCreate)
+ m.Post("/{variable_id}/edit", web.Bind(forms.EditVariableForm{}), repo_setting.VariableUpdate)
+ m.Post("/{variable_id}/delete", repo_setting.VariableDelete)
+ })
+ }
+
+ addSettingsSecretsRoutes := func() {
+ m.Group("/secrets", func() {
+ m.Get("", repo_setting.Secrets)
+ m.Post("", web.Bind(forms.AddSecretForm{}), repo_setting.SecretsPost)
+ m.Post("/delete", repo_setting.SecretsDelete)
+ })
+ }
+
+ addSettingsRunnersRoutes := func() {
+ m.Group("/runners", func() {
+ m.Get("", repo_setting.Runners)
+ m.Combo("/{runnerid}").Get(repo_setting.RunnersEdit).
+ Post(web.Bind(forms.EditRunnerForm{}), repo_setting.RunnersEditPost)
+ m.Post("/{runnerid}/delete", repo_setting.RunnerDeletePost)
+ m.Get("/reset_registration_token", repo_setting.ResetRunnerRegistrationToken)
+ })
+ }
+
+	// FIXME: not all routes need to go through the same middleware.
+	// Especially for some AJAX requests, we could reduce the number of middlewares to improve performance.
+
+ m.Get("/", Home)
+ m.Get("/sitemap.xml", sitemapEnabled, ignExploreSignIn, HomeSitemap)
+ m.Group("/.well-known", func() {
+ m.Get("/openid-configuration", auth.OIDCWellKnown)
+ m.Group("", func() {
+ m.Get("/nodeinfo", NodeInfoLinks)
+ m.Get("/webfinger", WebfingerQuery)
+ }, federationEnabled)
+ m.Get("/change-password", func(ctx *context.Context) {
+ ctx.Redirect(setting.AppSubURL + "/user/settings/account")
+ })
+ m.Methods("GET, HEAD", "/*", public.FileHandlerFunc())
+ }, optionsCorsHandler())
+
+ m.Group("/explore", func() {
+ m.Get("", func(ctx *context.Context) {
+ ctx.Redirect(setting.AppSubURL + "/explore/repos")
+ })
+ m.Get("/repos", explore.Repos)
+ m.Get("/repos/sitemap-{idx}.xml", sitemapEnabled, explore.Repos)
+ m.Get("/users", explore.Users)
+ m.Get("/users/sitemap-{idx}.xml", sitemapEnabled, explore.Users)
+ m.Get("/organizations", explore.Organizations)
+ m.Get("/code", func(ctx *context.Context) {
+ if unit.TypeCode.UnitGlobalDisabled() {
+ ctx.NotFound(unit.TypeCode.String(), nil)
+ return
+ }
+ }, explore.Code)
+ m.Get("/topics/search", explore.TopicSearch)
+ }, ignExploreSignIn)
+ m.Group("/issues", func() {
+ m.Get("", user.Issues)
+ m.Get("/search", repo.SearchIssues)
+ }, reqSignIn)
+
+ m.Get("/pulls", reqSignIn, user.Pulls)
+ m.Get("/milestones", reqSignIn, reqMilestonesDashboardPageEnabled, user.Milestones)
+
+ // ***** START: User *****
+	// "user/login" doesn't need signOut, so logged-in users can still access this route for redirection purposes via "/user/login?redirect_to=..."
+ m.Get("/user/login", auth.SignIn)
+ m.Group("/user", func() {
+ m.Post("/login", web.Bind(forms.SignInForm{}), auth.SignInPost)
+ m.Group("", func() {
+ m.Combo("/login/openid").
+ Get(auth.SignInOpenID).
+ Post(web.Bind(forms.SignInOpenIDForm{}), auth.SignInOpenIDPost)
+ }, openIDSignInEnabled)
+ m.Group("/openid", func() {
+ m.Combo("/connect").
+ Get(auth.ConnectOpenID).
+ Post(web.Bind(forms.ConnectOpenIDForm{}), auth.ConnectOpenIDPost)
+ m.Group("/register", func() {
+ m.Combo("").
+ Get(auth.RegisterOpenID, openIDSignUpEnabled).
+ Post(web.Bind(forms.SignUpOpenIDForm{}), auth.RegisterOpenIDPost)
+ }, openIDSignUpEnabled)
+ }, openIDSignInEnabled)
+ m.Get("/sign_up", auth.SignUp)
+ m.Post("/sign_up", web.Bind(forms.RegisterForm{}), auth.SignUpPost)
+ m.Get("/link_account", linkAccountEnabled, auth.LinkAccount)
+ m.Post("/link_account_signin", linkAccountEnabled, web.Bind(forms.SignInForm{}), auth.LinkAccountPostSignIn)
+ m.Post("/link_account_signup", linkAccountEnabled, web.Bind(forms.RegisterForm{}), auth.LinkAccountPostRegister)
+ m.Group("/two_factor", func() {
+ m.Get("", auth.TwoFactor)
+ m.Post("", web.Bind(forms.TwoFactorAuthForm{}), auth.TwoFactorPost)
+ m.Get("/scratch", auth.TwoFactorScratch)
+ m.Post("/scratch", web.Bind(forms.TwoFactorScratchAuthForm{}), auth.TwoFactorScratchPost)
+ })
+ m.Group("/webauthn", func() {
+ m.Get("", auth.WebAuthn)
+ m.Get("/assertion", auth.WebAuthnLoginAssertion)
+ m.Post("/assertion", auth.WebAuthnLoginAssertionPost)
+ })
+ }, reqSignOut)
+
+ m.Any("/user/events", routing.MarkLongPolling, events.Events)
+
+ m.Group("/login/oauth", func() {
+ m.Get("/authorize", web.Bind(forms.AuthorizationForm{}), auth.AuthorizeOAuth)
+ m.Post("/grant", web.Bind(forms.GrantApplicationForm{}), auth.GrantApplicationOAuth)
+ // TODO manage redirection
+ m.Post("/authorize", web.Bind(forms.AuthorizationForm{}), auth.AuthorizeOAuth)
+ }, ignSignInAndCsrf, reqSignIn)
+
+ m.Methods("GET, OPTIONS", "/login/oauth/userinfo", optionsCorsHandler(), ignSignInAndCsrf, auth.InfoOAuth)
+ m.Methods("POST, OPTIONS", "/login/oauth/access_token", optionsCorsHandler(), web.Bind(forms.AccessTokenForm{}), ignSignInAndCsrf, auth.AccessTokenOAuth)
+ m.Methods("GET, OPTIONS", "/login/oauth/keys", optionsCorsHandler(), ignSignInAndCsrf, auth.OIDCKeys)
+ m.Methods("POST, OPTIONS", "/login/oauth/introspect", optionsCorsHandler(), web.Bind(forms.IntrospectTokenForm{}), ignSignInAndCsrf, auth.IntrospectOAuth)
+
+ m.Group("/user/settings", func() {
+ m.Get("", user_setting.Profile)
+ m.Post("", web.Bind(forms.UpdateProfileForm{}), user_setting.ProfilePost)
+ m.Get("/change_password", auth.MustChangePassword)
+ m.Post("/change_password", web.Bind(forms.MustChangePasswordForm{}), auth.MustChangePasswordPost)
+ m.Post("/avatar", web.Bind(forms.AvatarForm{}), user_setting.AvatarPost)
+ m.Post("/avatar/delete", user_setting.DeleteAvatar)
+ m.Group("/account", func() {
+ m.Combo("").Get(user_setting.Account).Post(web.Bind(forms.ChangePasswordForm{}), user_setting.AccountPost)
+ m.Post("/email", web.Bind(forms.AddEmailForm{}), user_setting.EmailPost)
+ m.Post("/email/delete", user_setting.DeleteEmail)
+ m.Post("/delete", user_setting.DeleteAccount)
+ })
+ m.Group("/appearance", func() {
+ m.Get("", user_setting.Appearance)
+ m.Post("/language", web.Bind(forms.UpdateLanguageForm{}), user_setting.UpdateUserLang)
+ m.Post("/hints", web.Bind(forms.UpdateHintsForm{}), user_setting.UpdateUserHints)
+ m.Post("/hidden_comments", user_setting.UpdateUserHiddenComments)
+ m.Post("/theme", web.Bind(forms.UpdateThemeForm{}), user_setting.UpdateUIThemePost)
+ })
+ m.Group("/security", func() {
+ m.Get("", security.Security)
+ m.Group("/two_factor", func() {
+ m.Post("/regenerate_scratch", security.RegenerateScratchTwoFactor)
+ m.Post("/disable", security.DisableTwoFactor)
+ m.Get("/enroll", security.EnrollTwoFactor)
+ m.Post("/enroll", web.Bind(forms.TwoFactorAuthForm{}), security.EnrollTwoFactorPost)
+ })
+ m.Group("/webauthn", func() {
+ m.Post("/request_register", web.Bind(forms.WebauthnRegistrationForm{}), security.WebAuthnRegister)
+ m.Post("/register", security.WebauthnRegisterPost)
+ m.Post("/delete", web.Bind(forms.WebauthnDeleteForm{}), security.WebauthnDelete)
+ })
+ m.Group("/openid", func() {
+ m.Post("", web.Bind(forms.AddOpenIDForm{}), security.OpenIDPost)
+ m.Post("/delete", security.DeleteOpenID)
+ m.Post("/toggle_visibility", security.ToggleOpenIDVisibility)
+ }, openIDSignInEnabled)
+ m.Post("/account_link", linkAccountEnabled, security.DeleteAccountLink)
+ })
+ m.Group("/applications/oauth2", func() {
+ m.Get("/{id}", user_setting.OAuth2ApplicationShow)
+ m.Post("/{id}", web.Bind(forms.EditOAuth2ApplicationForm{}), user_setting.OAuthApplicationsEdit)
+ m.Post("/{id}/regenerate_secret", user_setting.OAuthApplicationsRegenerateSecret)
+ m.Post("", web.Bind(forms.EditOAuth2ApplicationForm{}), user_setting.OAuthApplicationsPost)
+ m.Post("/{id}/delete", user_setting.DeleteOAuth2Application)
+ m.Post("/{id}/revoke/{grantId}", user_setting.RevokeOAuth2Grant)
+ })
+ m.Combo("/applications").Get(user_setting.Applications).
+ Post(web.Bind(forms.NewAccessTokenForm{}), user_setting.ApplicationsPost)
+ m.Post("/applications/delete", user_setting.DeleteApplication)
+ m.Combo("/keys").Get(user_setting.Keys).
+ Post(web.Bind(forms.AddKeyForm{}), user_setting.KeysPost)
+ m.Post("/keys/delete", user_setting.DeleteKey)
+ m.Group("/packages", func() {
+ m.Get("", user_setting.Packages)
+ m.Group("/rules", func() {
+ m.Group("/add", func() {
+ m.Get("", user_setting.PackagesRuleAdd)
+ m.Post("", web.Bind(forms.PackageCleanupRuleForm{}), user_setting.PackagesRuleAddPost)
+ })
+ m.Group("/{id}", func() {
+ m.Get("", user_setting.PackagesRuleEdit)
+ m.Post("", web.Bind(forms.PackageCleanupRuleForm{}), user_setting.PackagesRuleEditPost)
+ m.Get("/preview", user_setting.PackagesRulePreview)
+ })
+ })
+ m.Group("/cargo", func() {
+ m.Post("/initialize", user_setting.InitializeCargoIndex)
+ m.Post("/rebuild", user_setting.RebuildCargoIndex)
+ })
+ m.Post("/chef/regenerate_keypair", user_setting.RegenerateChefKeyPair)
+ }, packagesEnabled)
+
+ m.Group("/actions", func() {
+ m.Get("", user_setting.RedirectToDefaultSetting)
+ addSettingsRunnersRoutes()
+ addSettingsSecretsRoutes()
+ addSettingsVariablesRoutes()
+ }, actions.MustEnableActions)
+
+ m.Get("/organization", user_setting.Organization)
+ m.Get("/repos", user_setting.Repos)
+ m.Post("/repos/unadopted", user_setting.AdoptOrDeleteRepository)
+
+ m.Group("/hooks", func() {
+ m.Get("", user_setting.Webhooks)
+ m.Post("/delete", user_setting.DeleteWebhook)
+ m.Get("/{type}/new", repo_setting.WebhookNew)
+ m.Post("/{type}/new", repo_setting.WebhookCreate)
+ m.Group("/{id}", func() {
+ m.Get("", repo_setting.WebhookEdit)
+ m.Post("", repo_setting.WebhookUpdate)
+ m.Post("/replay/{uuid}", repo_setting.WebhookReplay)
+ })
+ }, webhooksEnabled)
+
+ m.Group("/blocked_users", func() {
+ m.Get("", user_setting.BlockedUsers)
+ m.Post("/unblock", user_setting.UnblockUser)
+ })
+ }, reqSignIn, ctxDataSet("PageIsUserSettings", true, "AllThemes", setting.UI.Themes, "EnablePackages", setting.Packages.Enabled))
+
+ m.Group("/user", func() {
+ m.Get("/activate", auth.Activate)
+ m.Post("/activate", auth.ActivatePost)
+ m.Any("/activate_email", auth.ActivateEmail)
+ m.Get("/avatar/{username}/{size}", user.AvatarByUserName)
+ m.Get("/recover_account", auth.ResetPasswd)
+ m.Post("/recover_account", auth.ResetPasswdPost)
+ m.Get("/forgot_password", auth.ForgotPasswd)
+ m.Post("/forgot_password", auth.ForgotPasswdPost)
+ m.Post("/logout", auth.SignOut)
+ m.Get("/task/{task}", reqSignIn, user.TaskStatus)
+ m.Get("/stopwatches", reqSignIn, user.GetStopwatches)
+ m.Get("/search", ignExploreSignIn, user.Search)
+ m.Group("/oauth2", func() {
+ m.Get("/{provider}", auth.SignInOAuth)
+ m.Get("/{provider}/callback", auth.SignInOAuthCallback)
+ })
+ })
+ // ***** END: User *****
+
+ m.Get("/avatar/{hash}", user.AvatarByEmailHash)
+
+ adminReq := verifyAuthWithOptions(&common.VerifyOptions{SignInRequired: true, AdminRequired: true})
+
+ // ***** START: Admin *****
+ m.Group("/admin", func() {
+ m.Get("", admin.Dashboard)
+ m.Get("/system_status", admin.SystemStatus)
+ m.Post("", web.Bind(forms.AdminDashboardForm{}), admin.DashboardPost)
+
+ if setting.Database.Type.IsMySQL() {
+ m.Get("/self_check", admin.SelfCheck)
+ }
+
+ m.Group("/config", func() {
+ m.Get("", admin.Config)
+ m.Post("", admin.ChangeConfig)
+ m.Post("/test_mail", admin.SendTestMail)
+ m.Post("/test_cache", admin.TestCache)
+ m.Get("/settings", admin.ConfigSettings)
+ })
+
+ m.Group("/monitor", func() {
+ m.Get("/stats", admin.MonitorStats)
+ m.Get("/cron", admin.CronTasks)
+ m.Get("/stacktrace", admin.Stacktrace)
+ m.Post("/stacktrace/cancel/{pid}", admin.StacktraceCancel)
+ m.Get("/queue", admin.Queues)
+ m.Group("/queue/{qid}", func() {
+ m.Get("", admin.QueueManage)
+ m.Post("/set", admin.QueueSet)
+ m.Post("/remove-all-items", admin.QueueRemoveAllItems)
+ })
+ m.Get("/diagnosis", admin.MonitorDiagnosis)
+ })
+
+ m.Group("/users", func() {
+ m.Get("", admin.Users)
+ m.Combo("/new").Get(admin.NewUser).Post(web.Bind(forms.AdminCreateUserForm{}), admin.NewUserPost)
+ m.Get("/{userid}", admin.ViewUser)
+ m.Combo("/{userid}/edit").Get(admin.EditUser).Post(web.Bind(forms.AdminEditUserForm{}), admin.EditUserPost)
+ m.Post("/{userid}/delete", admin.DeleteUser)
+ m.Post("/{userid}/avatar", web.Bind(forms.AvatarForm{}), admin.AvatarPost)
+ m.Post("/{userid}/avatar/delete", admin.DeleteAvatar)
+ })
+
+ m.Group("/emails", func() {
+ m.Get("", admin.Emails)
+ m.Post("/activate", admin.ActivateEmail)
+ m.Post("/delete", admin.DeleteEmail)
+ })
+
+ m.Group("/orgs", func() {
+ m.Get("", admin.Organizations)
+ })
+
+ m.Group("/repos", func() {
+ m.Get("", admin.Repos)
+ m.Combo("/unadopted").Get(admin.UnadoptedRepos).Post(admin.AdoptOrDeleteRepository)
+ m.Post("/delete", admin.DeleteRepo)
+ })
+
+ m.Group("/packages", func() {
+ m.Get("", admin.Packages)
+ m.Post("/delete", admin.DeletePackageVersion)
+ m.Post("/cleanup", admin.CleanupExpiredData)
+ }, packagesEnabled)
+
+ m.Group("/hooks", func() {
+ m.Get("", admin.DefaultOrSystemWebhooks)
+ m.Post("/delete", admin.DeleteDefaultOrSystemWebhook)
+ m.Group("/{id}", func() {
+ m.Get("", repo_setting.WebhookEdit)
+ m.Post("", repo_setting.WebhookUpdate)
+ m.Post("/replay/{uuid}", repo_setting.WebhookReplay)
+ })
+ }, webhooksEnabled)
+
+ m.Group("/{configType:default-hooks|system-hooks}", func() {
+ m.Get("/{type}/new", repo_setting.WebhookNew)
+ m.Post("/{type}/new", repo_setting.WebhookCreate)
+ })
+
+ m.Group("/auths", func() {
+ m.Get("", admin.Authentications)
+ m.Combo("/new").Get(admin.NewAuthSource).Post(web.Bind(forms.AuthenticationForm{}), admin.NewAuthSourcePost)
+ m.Combo("/{authid}").Get(admin.EditAuthSource).
+ Post(web.Bind(forms.AuthenticationForm{}), admin.EditAuthSourcePost)
+ m.Post("/{authid}/delete", admin.DeleteAuthSource)
+ })
+
+ m.Group("/notices", func() {
+ m.Get("", admin.Notices)
+ m.Post("/delete", admin.DeleteNotices)
+ m.Post("/empty", admin.EmptyNotices)
+ })
+
+ m.Group("/applications", func() {
+ m.Get("", admin.Applications)
+ m.Post("/oauth2", web.Bind(forms.EditOAuth2ApplicationForm{}), admin.ApplicationsPost)
+ m.Group("/oauth2/{id}", func() {
+ m.Combo("").Get(admin.EditApplication).Post(web.Bind(forms.EditOAuth2ApplicationForm{}), admin.EditApplicationPost)
+ m.Post("/regenerate_secret", admin.ApplicationsRegenerateSecret)
+ m.Post("/delete", admin.DeleteApplication)
+ })
+ }, func(ctx *context.Context) {
+ if !setting.OAuth2.Enabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ })
+
+ m.Group("/actions", func() {
+ m.Get("", admin.RedirectToDefaultSetting)
+ addSettingsRunnersRoutes()
+ addSettingsVariablesRoutes()
+ })
+ }, adminReq, ctxDataSet("EnableOAuth2", setting.OAuth2.Enabled, "EnablePackages", setting.Packages.Enabled))
+ // ***** END: Admin *****
+
+ m.Group("", func() {
+ m.Get("/{username}", user.UsernameSubRoute)
+ m.Methods("GET, OPTIONS", "/attachments/{uuid}", optionsCorsHandler(), repo.GetAttachment)
+ }, ignSignIn)
+
+ m.Post("/{username}", reqSignIn, context.UserAssignmentWeb(), user.Action)
+
+ reqRepoAdmin := context.RequireRepoAdmin()
+ reqRepoCodeWriter := context.RequireRepoWriter(unit.TypeCode)
+ canEnableEditor := context.CanEnableEditor()
+ reqRepoCodeReader := context.RequireRepoReader(unit.TypeCode)
+ reqRepoReleaseWriter := context.RequireRepoWriter(unit.TypeReleases)
+ reqRepoReleaseReader := context.RequireRepoReader(unit.TypeReleases)
+ reqRepoWikiWriter := context.RequireRepoWriter(unit.TypeWiki)
+ reqRepoIssueReader := context.RequireRepoReader(unit.TypeIssues)
+ reqRepoPullsReader := context.RequireRepoReader(unit.TypePullRequests)
+ reqRepoIssuesOrPullsWriter := context.RequireRepoWriterOr(unit.TypeIssues, unit.TypePullRequests)
+ reqRepoIssuesOrPullsReader := context.RequireRepoReaderOr(unit.TypeIssues, unit.TypePullRequests)
+ reqRepoProjectsReader := context.RequireRepoReader(unit.TypeProjects)
+ reqRepoProjectsWriter := context.RequireRepoWriter(unit.TypeProjects)
+ reqRepoActionsReader := context.RequireRepoReader(unit.TypeActions)
+ reqRepoActionsWriter := context.RequireRepoWriter(unit.TypeActions)
+
+ reqPackageAccess := func(accessMode perm.AccessMode) func(ctx *context.Context) {
+ return func(ctx *context.Context) {
+ if ctx.Package.AccessMode < accessMode && !ctx.IsUserSiteAdmin() {
+ ctx.NotFound("", nil)
+ }
+ }
+ }
+
+ individualPermsChecker := func(ctx *context.Context) {
+ // org permissions have been checked in context.OrgAssignment(), but individual permissions haven't been checked.
+ if ctx.ContextUser.IsIndividual() {
+ switch {
+ case ctx.ContextUser.Visibility == structs.VisibleTypePrivate:
+ if ctx.Doer == nil || (ctx.ContextUser.ID != ctx.Doer.ID && !ctx.Doer.IsAdmin) {
+ ctx.NotFound("Visit Project", nil)
+ return
+ }
+ case ctx.ContextUser.Visibility == structs.VisibleTypeLimited:
+ if ctx.Doer == nil {
+ ctx.NotFound("Visit Project", nil)
+ return
+ }
+ }
+ }
+ }
+
+ // ***** START: Organization *****
+ m.Group("/org", func() {
+ m.Group("/{org}", func() {
+ m.Get("/members", org.Members)
+ }, context.OrgAssignment())
+ }, ignSignIn)
+
+ m.Group("/org", func() {
+ m.Group("", func() {
+ m.Get("/create", org.Create)
+ m.Post("/create", web.Bind(forms.CreateOrgForm{}), org.CreatePost)
+ })
+
+ m.Group("/invite/{token}", func() {
+ m.Get("", org.TeamInvite)
+ m.Post("", org.TeamInvitePost)
+ })
+
+ m.Group("/{org}", func() {
+ m.Get("/dashboard", user.Dashboard)
+ m.Get("/dashboard/{team}", user.Dashboard)
+ m.Get("/issues", user.Issues)
+ m.Get("/issues/{team}", user.Issues)
+ m.Get("/pulls", user.Pulls)
+ m.Get("/pulls/{team}", user.Pulls)
+ m.Get("/milestones", reqMilestonesDashboardPageEnabled, user.Milestones)
+ m.Get("/milestones/{team}", reqMilestonesDashboardPageEnabled, user.Milestones)
+ m.Post("/members/action/{action}", org.MembersAction)
+ m.Get("/teams", org.Teams)
+ }, context.OrgAssignment(true, false, true))
+
+ m.Group("/{org}", func() {
+ m.Get("/teams/{team}", org.TeamMembers)
+ m.Get("/teams/{team}/repositories", org.TeamRepositories)
+ m.Post("/teams/{team}/action/{action}", org.TeamsAction)
+ m.Post("/teams/{team}/action/repo/{action}", org.TeamsRepoAction)
+ }, context.OrgAssignment(true, false, true))
+
+ // require admin permission
+ m.Group("/{org}", func() {
+ m.Get("/teams/-/search", org.SearchTeam)
+ }, context.OrgAssignment(true, false, false, true))
+
+ // require owner permission
+ m.Group("/{org}", func() {
+ m.Get("/teams/new", org.NewTeam)
+ m.Post("/teams/new", web.Bind(forms.CreateTeamForm{}), org.NewTeamPost)
+ m.Get("/teams/{team}/edit", org.EditTeam)
+ m.Post("/teams/{team}/edit", web.Bind(forms.CreateTeamForm{}), org.EditTeamPost)
+ m.Post("/teams/{team}/delete", org.DeleteTeam)
+
+ m.Group("/settings", func() {
+ m.Combo("").Get(org.Settings).
+ Post(web.Bind(forms.UpdateOrgSettingForm{}), org.SettingsPost)
+ m.Post("/avatar", web.Bind(forms.AvatarForm{}), org.SettingsAvatar)
+ m.Post("/avatar/delete", org.SettingsDeleteAvatar)
+ m.Group("/applications", func() {
+ m.Get("", org.Applications)
+ m.Post("/oauth2", web.Bind(forms.EditOAuth2ApplicationForm{}), org.OAuthApplicationsPost)
+ m.Group("/oauth2/{id}", func() {
+ m.Combo("").Get(org.OAuth2ApplicationShow).Post(web.Bind(forms.EditOAuth2ApplicationForm{}), org.OAuth2ApplicationEdit)
+ m.Post("/regenerate_secret", org.OAuthApplicationsRegenerateSecret)
+ m.Post("/delete", org.DeleteOAuth2Application)
+ })
+ }, func(ctx *context.Context) {
+ if !setting.OAuth2.Enabled {
+ ctx.Error(http.StatusForbidden)
+ return
+ }
+ })
+
+ m.Group("/hooks", func() {
+ m.Get("", org.Webhooks)
+ m.Post("/delete", org.DeleteWebhook)
+ m.Get("/{type}/new", repo_setting.WebhookNew)
+ m.Post("/{type}/new", repo_setting.WebhookCreate)
+ m.Group("/{id}", func() {
+ m.Get("", repo_setting.WebhookEdit)
+ m.Post("", repo_setting.WebhookUpdate)
+ m.Post("/replay/{uuid}", repo_setting.WebhookReplay)
+ })
+ }, webhooksEnabled)
+
+ m.Group("/labels", func() {
+ m.Get("", org.RetrieveLabels, org.Labels)
+ m.Post("/new", web.Bind(forms.CreateLabelForm{}), org.NewLabel)
+ m.Post("/edit", web.Bind(forms.CreateLabelForm{}), org.UpdateLabel)
+ m.Post("/delete", org.DeleteLabel)
+ m.Post("/initialize", web.Bind(forms.InitializeLabelsForm{}), org.InitializeLabels)
+ })
+
+ m.Group("/actions", func() {
+ m.Get("", org_setting.RedirectToDefaultSetting)
+ addSettingsRunnersRoutes()
+ addSettingsSecretsRoutes()
+ addSettingsVariablesRoutes()
+ }, actions.MustEnableActions)
+
+ m.Methods("GET,POST", "/delete", org.SettingsDelete)
+
+ m.Group("/blocked_users", func() {
+ m.Get("", org_setting.BlockedUsers)
+ m.Post("/block", org_setting.BlockedUsersBlock)
+ m.Post("/unblock", org_setting.BlockedUsersUnblock)
+ })
+
+ m.Group("/packages", func() {
+ m.Get("", org.Packages)
+ m.Group("/rules", func() {
+ m.Group("/add", func() {
+ m.Get("", org.PackagesRuleAdd)
+ m.Post("", web.Bind(forms.PackageCleanupRuleForm{}), org.PackagesRuleAddPost)
+ })
+ m.Group("/{id}", func() {
+ m.Get("", org.PackagesRuleEdit)
+ m.Post("", web.Bind(forms.PackageCleanupRuleForm{}), org.PackagesRuleEditPost)
+ m.Get("/preview", org.PackagesRulePreview)
+ })
+ })
+ m.Group("/cargo", func() {
+ m.Post("/initialize", org.InitializeCargoIndex)
+ m.Post("/rebuild", org.RebuildCargoIndex)
+ })
+ }, packagesEnabled)
+ }, ctxDataSet("EnableOAuth2", setting.OAuth2.Enabled, "EnablePackages", setting.Packages.Enabled, "PageIsOrgSettings", true))
+ }, context.OrgAssignment(true, true))
+ }, reqSignIn)
+ // ***** END: Organization *****
+
+ // ***** START: Repository *****
+ m.Group("/repo", func() {
+ m.Get("/create", repo.Create)
+ m.Post("/create", web.Bind(forms.CreateRepoForm{}), repo.CreatePost)
+ m.Get("/migrate", repo.Migrate)
+ m.Post("/migrate", web.Bind(forms.MigrateRepoForm{}), repo.MigratePost)
+ if !setting.Repository.DisableForks {
+ m.Get("/fork/{repoid}", context.RepoIDAssignment(), context.UnitTypes(), reqRepoCodeReader, repo.ForkByID)
+ }
+ m.Get("/search", repo.SearchRepo)
+ }, reqSignIn)
+
+ m.Group("/{username}/-", func() {
+ if setting.Packages.Enabled {
+ m.Group("/packages", func() {
+ m.Get("", user.ListPackages)
+ m.Group("/{type}/{name}", func() {
+ m.Get("", user.RedirectToLastVersion)
+ m.Get("/versions", user.ListPackageVersions)
+ m.Group("/{version}", func() {
+ m.Get("", user.ViewPackageVersion)
+ m.Get("/files/{fileid}", user.DownloadPackageFile)
+ m.Group("/settings", func() {
+ m.Get("", user.PackageSettings)
+ m.Post("", web.Bind(forms.PackageSettingForm{}), user.PackageSettingsPost)
+ }, reqPackageAccess(perm.AccessModeWrite))
+ })
+ })
+ }, context.PackageAssignment(), reqPackageAccess(perm.AccessModeRead))
+ }
+
+ m.Group("/projects", func() {
+ m.Group("", func() {
+ m.Get("", org.Projects)
+ m.Get("/{id}", org.ViewProject)
+ }, reqUnitAccess(unit.TypeProjects, perm.AccessModeRead, true))
+ m.Group("", func() { //nolint:dupl
+ m.Get("/new", org.RenderNewProject)
+ m.Post("/new", web.Bind(forms.CreateProjectForm{}), org.NewProjectPost)
+ m.Group("/{id}", func() {
+ m.Post("", web.Bind(forms.EditProjectColumnForm{}), org.AddColumnToProjectPost)
+ m.Post("/move", project.MoveColumns)
+ m.Post("/delete", org.DeleteProject)
+
+ m.Get("/edit", org.RenderEditProject)
+ m.Post("/edit", web.Bind(forms.CreateProjectForm{}), org.EditProjectPost)
+ m.Post("/{action:open|close}", org.ChangeProjectStatus)
+
+ m.Group("/{columnID}", func() {
+ m.Put("", web.Bind(forms.EditProjectColumnForm{}), org.EditProjectColumn)
+ m.Delete("", org.DeleteProjectColumn)
+ m.Post("/default", org.SetDefaultProjectColumn)
+
+ m.Post("/move", org.MoveIssues)
+ })
+ })
+ }, reqSignIn, reqUnitAccess(unit.TypeProjects, perm.AccessModeWrite, true), func(ctx *context.Context) {
+ if ctx.ContextUser.IsIndividual() && ctx.ContextUser.ID != ctx.Doer.ID {
+ ctx.NotFound("NewProject", nil)
+ return
+ }
+ })
+ }, reqUnitAccess(unit.TypeProjects, perm.AccessModeRead, true), individualPermsChecker)
+
+ m.Group("", func() {
+ m.Get("/code", user.CodeSearch)
+ }, reqUnitAccess(unit.TypeCode, perm.AccessModeRead, false), individualPermsChecker)
+ }, ignSignIn, context.UserAssignmentWeb(), context.OrgAssignment()) // for "/{username}/-" (packages, projects, code)
+
+ m.Group("/{username}/{reponame}", func() {
+ m.Group("/settings", func() {
+ m.Group("", func() {
+ m.Combo("").Get(repo_setting.Settings).
+ Post(web.Bind(forms.RepoSettingForm{}), repo_setting.SettingsPost)
+ }, repo_setting.SettingsCtxData)
+ m.Combo("/units").Get(repo_setting.Units).
+ Post(web.Bind(forms.RepoUnitSettingForm{}), repo_setting.UnitsPost)
+ m.Post("/avatar", web.Bind(forms.AvatarForm{}), repo_setting.SettingsAvatar)
+ m.Post("/avatar/delete", repo_setting.SettingsDeleteAvatar)
+
+ m.Group("/collaboration", func() {
+ m.Combo("").Get(repo_setting.Collaboration).Post(repo_setting.CollaborationPost)
+ m.Post("/access_mode", repo_setting.ChangeCollaborationAccessMode)
+ m.Post("/delete", repo_setting.DeleteCollaboration)
+ m.Group("/team", func() {
+ m.Post("", repo_setting.AddTeamPost)
+ m.Post("/delete", repo_setting.DeleteTeam)
+ })
+ })
+
+ m.Group("/branches", func() {
+ m.Post("/", repo_setting.SetDefaultBranchPost)
+ }, repo.MustBeNotEmpty)
+
+ m.Group("/branches", func() {
+ m.Get("/", repo_setting.ProtectedBranchRules)
+ m.Combo("/edit").Get(repo_setting.SettingsProtectedBranch).
+ Post(web.Bind(forms.ProtectBranchForm{}), context.RepoMustNotBeArchived(), repo_setting.SettingsProtectedBranchPost)
+ m.Post("/{id}/delete", repo_setting.DeleteProtectedBranchRulePost)
+ }, repo.MustBeNotEmpty)
+
+ m.Post("/rename_branch", web.Bind(forms.RenameBranchForm{}), context.RepoMustNotBeArchived(), repo_setting.RenameBranchPost)
+
+ m.Group("/tags", func() {
+ m.Get("", repo_setting.ProtectedTags)
+ m.Post("", web.Bind(forms.ProtectTagForm{}), context.RepoMustNotBeArchived(), repo_setting.NewProtectedTagPost)
+ m.Post("/delete", context.RepoMustNotBeArchived(), repo_setting.DeleteProtectedTagPost)
+ m.Get("/{id}", repo_setting.EditProtectedTag)
+ m.Post("/{id}", web.Bind(forms.ProtectTagForm{}), context.RepoMustNotBeArchived(), repo_setting.EditProtectedTagPost)
+ })
+
+ m.Group("/hooks/git", func() {
+ m.Get("", repo_setting.GitHooks)
+ m.Combo("/{name}").Get(repo_setting.GitHooksEdit).
+ Post(repo_setting.GitHooksEditPost)
+ }, context.GitHookService())
+
+ m.Group("/hooks", func() {
+ m.Get("", repo_setting.WebhookList)
+ m.Post("/delete", repo_setting.WebhookDelete)
+ m.Get("/{type}/new", repo_setting.WebhookNew)
+ m.Post("/{type}/new", repo_setting.WebhookCreate)
+ m.Group("/{id}", func() {
+ m.Get("", repo_setting.WebhookEdit)
+ m.Post("", repo_setting.WebhookUpdate)
+ m.Post("/test", repo_setting.WebhookTest)
+ m.Post("/replay/{uuid}", repo_setting.WebhookReplay)
+ })
+ }, webhooksEnabled)
+
+ m.Group("/keys", func() {
+ m.Combo("").Get(repo_setting.DeployKeys).
+ Post(web.Bind(forms.AddKeyForm{}), repo_setting.DeployKeysPost)
+ m.Post("/delete", repo_setting.DeleteDeployKey)
+ })
+
+ m.Group("/lfs", func() {
+ m.Get("/", repo_setting.LFSFiles)
+ m.Get("/show/{oid}", repo_setting.LFSFileGet)
+ m.Post("/delete/{oid}", repo_setting.LFSDelete)
+ m.Get("/pointers", repo_setting.LFSPointerFiles)
+ m.Post("/pointers/associate", repo_setting.LFSAutoAssociate)
+ m.Get("/find", repo_setting.LFSFileFind)
+ m.Group("/locks", func() {
+ m.Get("/", repo_setting.LFSLocks)
+ m.Post("/", repo_setting.LFSLockFile)
+ m.Post("/{lid}/unlock", repo_setting.LFSUnlock)
+ })
+ })
+ m.Group("/actions", func() {
+ m.Get("", repo_setting.RedirectToDefaultSetting)
+ addSettingsRunnersRoutes()
+ addSettingsSecretsRoutes()
+ addSettingsVariablesRoutes()
+ }, actions.MustEnableActions)
+ // the following handlers must be under "settings", otherwise this incomplete repo can't be accessed
+ m.Group("/migrate", func() {
+ m.Post("/retry", repo.MigrateRetryPost)
+ m.Post("/cancel", repo.MigrateCancelPost)
+ })
+ }, ctxDataSet("PageIsRepoSettings", true, "LFSStartServer", setting.LFS.StartServer))
+ }, reqSignIn, context.RepoAssignment, context.UnitTypes(), reqRepoAdmin, context.RepoRef())
+
+ m.Group("/{username}/{reponame}/action", func() {
+ m.Post("/watch", repo.ActionWatch(true))
+ m.Post("/unwatch", repo.ActionWatch(false))
+ m.Post("/accept_transfer", repo.ActionTransfer(true))
+ m.Post("/reject_transfer", repo.ActionTransfer(false))
+ if !setting.Repository.DisableStars {
+ m.Post("/star", repo.ActionStar(true))
+ m.Post("/unstar", repo.ActionStar(false))
+ }
+ }, reqSignIn, context.RepoAssignment, context.UnitTypes())
+
+ // Group of endpoints that do not require authentication but should still respect ignSignIn
+ m.Group("/{username}/{reponame}", func() {
+ m.Group("/milestone", func() {
+ m.Get("/{id}", repo.MilestoneIssuesAndPulls)
+ }, reqRepoIssuesOrPullsReader, context.RepoRef())
+ m.Get("/find/*", repo.FindFiles)
+ m.Group("/tree-list", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.TreeList)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.TreeList)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.TreeList)
+ })
+ m.Get("/compare", repo.MustBeNotEmpty, reqRepoCodeReader, repo.SetEditorconfigIfExists, ignSignIn, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.CompareDiff)
+ m.Combo("/compare/*", repo.MustBeNotEmpty, reqRepoCodeReader, repo.SetEditorconfigIfExists).
+ Get(repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.CompareDiff).
+ Post(reqSignIn, context.RepoMustNotBeArchived(), reqRepoPullsReader, repo.MustAllowPulls, web.Bind(forms.CreateIssueForm{}), repo.SetWhitespaceBehavior, repo.CompareAndPullRequestPost)
+ m.Group("/{type:issues|pulls}", func() {
+ m.Group("/{index}", func() {
+ m.Get("/info", repo.GetIssueInfo)
+ })
+ })
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes()) // for "/{username}/{reponame}" which doesn't require authentication
+
+ // Group of endpoints that do require authentication
+ m.Group("/{username}/{reponame}", func() {
+ if !setting.Repository.DisableForks {
+ m.Combo("/fork", reqRepoCodeReader).Get(repo.Fork).
+ Post(web.Bind(forms.CreateRepoForm{}), repo.ForkPost)
+ }
+ m.Group("/issues", func() {
+ m.Group("/new", func() {
+ m.Combo("").Get(context.RepoRef(), repo.NewIssue).
+ Post(web.Bind(forms.CreateIssueForm{}), repo.NewIssuePost)
+ m.Get("/choose", context.RepoRef(), repo.NewIssueChooseTemplate)
+ })
+ m.Get("/search", repo.ListIssues)
+ }, context.RepoMustNotBeArchived(), reqRepoIssueReader)
+ // FIXME: issue and pull request comments should use different URLs even though they share
+ // mostly the same logic, so that each can apply its own enable/disable logic on the routers.
+ m.Group("/{type:issues|pulls}", func() {
+ m.Group("/{index}", func() {
+ m.Post("/title", repo.UpdateIssueTitle)
+ m.Post("/content", repo.UpdateIssueContent)
+ m.Post("/deadline", web.Bind(structs.EditDeadlineOption{}), repo.UpdateIssueDeadline)
+ m.Post("/watch", repo.IssueWatch)
+ m.Post("/ref", repo.UpdateIssueRef)
+ m.Post("/pin", reqRepoAdmin, repo.IssuePinOrUnpin)
+ m.Post("/viewed-files", repo.UpdateViewedFiles)
+ m.Group("/dependency", func() {
+ m.Post("/add", repo.AddDependency)
+ m.Post("/delete", repo.RemoveDependency)
+ })
+ m.Combo("/comments").Post(repo.MustAllowUserComment, web.Bind(forms.CreateCommentForm{}), repo.NewComment)
+ m.Group("/times", func() {
+ m.Post("/add", web.Bind(forms.AddTimeManuallyForm{}), repo.AddTimeManually)
+ m.Post("/{timeid}/delete", repo.DeleteTime)
+ m.Group("/stopwatch", func() {
+ m.Post("/toggle", repo.IssueStopwatch)
+ m.Post("/cancel", repo.CancelStopwatch)
+ })
+ })
+ m.Post("/reactions/{action}", web.Bind(forms.ReactionForm{}), repo.ChangeIssueReaction)
+ m.Post("/lock", reqRepoIssuesOrPullsWriter, web.Bind(forms.IssueLockForm{}), repo.LockIssue)
+ m.Post("/unlock", reqRepoIssuesOrPullsWriter, repo.UnlockIssue)
+ m.Post("/delete", reqRepoAdmin, repo.DeleteIssue)
+ }, context.RepoMustNotBeArchived())
+ m.Group("/{index}", func() {
+ m.Get("/attachments", repo.GetIssueAttachments)
+ m.Get("/attachments/{uuid}", repo.GetAttachment)
+ })
+ m.Group("/{index}", func() {
+ m.Post("/content-history/soft-delete", repo.SoftDeleteContentHistory)
+ })
+
+ m.Post("/labels", reqRepoIssuesOrPullsWriter, repo.UpdateIssueLabel)
+ m.Post("/milestone", reqRepoIssuesOrPullsWriter, repo.UpdateIssueMilestone)
+ m.Post("/projects", reqRepoIssuesOrPullsWriter, reqRepoProjectsReader, repo.UpdateIssueProject)
+ m.Post("/assignee", reqRepoIssuesOrPullsWriter, repo.UpdateIssueAssignee)
+ m.Post("/request_review", reqRepoIssuesOrPullsReader, repo.UpdatePullReviewRequest)
+ m.Post("/dismiss_review", reqRepoAdmin, web.Bind(forms.DismissReviewForm{}), repo.DismissReview)
+ m.Post("/status", reqRepoIssuesOrPullsWriter, repo.UpdateIssueStatus)
+ m.Post("/delete", reqRepoAdmin, repo.BatchDeleteIssues)
+ m.Post("/resolve_conversation", reqRepoIssuesOrPullsReader, repo.SetShowOutdatedComments, repo.UpdateResolveConversation)
+ m.Post("/attachments", context.EnforceQuotaWeb(quota_model.LimitSubjectSizeAssetsAttachmentsIssues, context.QuotaTargetRepo), repo.UploadIssueAttachment)
+ m.Post("/attachments/remove", repo.DeleteAttachment)
+ m.Delete("/unpin/{index}", reqRepoAdmin, repo.IssueUnpin)
+ m.Post("/move_pin", reqRepoAdmin, repo.IssuePinMove)
+ }, context.RepoMustNotBeArchived())
+ m.Group("/comments/{id}", func() {
+ m.Post("", repo.UpdateCommentContent)
+ m.Post("/delete", repo.DeleteComment)
+ m.Post("/reactions/{action}", web.Bind(forms.ReactionForm{}), repo.ChangeCommentReaction)
+ }, context.RepoMustNotBeArchived())
+ m.Group("/comments/{id}", func() {
+ m.Get("/attachments", repo.GetCommentAttachments)
+ })
+ m.Post("/markup", web.Bind(structs.MarkupOption{}), misc.Markup)
+ m.Group("/labels", func() {
+ m.Post("/new", web.Bind(forms.CreateLabelForm{}), repo.NewLabel)
+ m.Post("/edit", web.Bind(forms.CreateLabelForm{}), repo.UpdateLabel)
+ m.Post("/delete", repo.DeleteLabel)
+ m.Post("/initialize", web.Bind(forms.InitializeLabelsForm{}), repo.InitializeLabels)
+ }, context.RepoMustNotBeArchived(), reqRepoIssuesOrPullsWriter, context.RepoRef())
+ m.Group("/milestones", func() {
+ m.Combo("/new").Get(repo.NewMilestone).
+ Post(web.Bind(forms.CreateMilestoneForm{}), repo.NewMilestonePost)
+ m.Get("/{id}/edit", repo.EditMilestone)
+ m.Post("/{id}/edit", web.Bind(forms.CreateMilestoneForm{}), repo.EditMilestonePost)
+ m.Post("/{id}/{action}", repo.ChangeMilestoneStatus)
+ m.Post("/delete", repo.DeleteMilestone)
+ }, context.RepoMustNotBeArchived(), reqRepoIssuesOrPullsWriter, context.RepoRef())
+ m.Group("/pull", func() {
+ m.Post("/{index}/target_branch", repo.UpdatePullRequestTarget)
+ }, context.RepoMustNotBeArchived())
+
+ m.Group("", func() {
+ m.Group("", func() {
+ m.Combo("/_edit/*").Get(repo.EditFile).
+ Post(web.Bind(forms.EditRepoFileForm{}), repo.EditFilePost)
+ m.Combo("/_new/*").Get(repo.NewFile).
+ Post(web.Bind(forms.EditRepoFileForm{}), repo.NewFilePost)
+ m.Post("/_preview/*", web.Bind(forms.EditPreviewDiffForm{}), repo.DiffPreviewPost)
+ m.Combo("/_delete/*").Get(repo.DeleteFile).
+ Post(web.Bind(forms.DeleteRepoFileForm{}), repo.DeleteFilePost)
+ m.Combo("/_upload/*", repo.MustBeAbleToUpload).
+ Get(repo.UploadFile).
+ Post(web.Bind(forms.UploadRepoFileForm{}), repo.UploadFilePost)
+ m.Combo("/_diffpatch/*").Get(repo.NewDiffPatch).
+ Post(web.Bind(forms.EditRepoFileForm{}), repo.NewDiffPatchPost)
+ m.Combo("/_cherrypick/{sha:([a-f0-9]{4,64})}/*").Get(repo.CherryPick).
+ Post(web.Bind(forms.CherryPickForm{}), repo.CherryPickPost)
+ }, repo.MustBeEditable, repo.CommonEditorData, context.EnforceQuotaWeb(quota_model.LimitSubjectSizeReposAll, context.QuotaTargetRepo))
+ m.Group("", func() {
+ m.Post("/upload-file", context.EnforceQuotaWeb(quota_model.LimitSubjectSizeReposAll, context.QuotaTargetRepo), repo.UploadFileToServer)
+ m.Post("/upload-remove", web.Bind(forms.RemoveUploadFileForm{}), repo.RemoveUploadFileFromServer)
+ }, repo.MustBeEditable, repo.MustBeAbleToUpload)
+ }, context.RepoRef(), canEnableEditor, context.RepoMustNotBeArchived())
+
+ m.Group("/branches", func() {
+ m.Group("/_new", func() {
+ m.Post("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.CreateBranch)
+ m.Post("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.CreateBranch)
+ m.Post("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.CreateBranch)
+ }, web.Bind(forms.NewBranchForm{}), context.EnforceQuotaWeb(quota_model.LimitSubjectSizeReposAll, context.QuotaTargetRepo))
+ m.Post("/delete", repo.DeleteBranchPost)
+ m.Post("/restore", repo.RestoreBranchPost)
+ }, context.RepoMustNotBeArchived(), reqRepoCodeWriter, repo.MustBeNotEmpty)
+ }, reqSignIn, context.RepoAssignment, context.UnitTypes())
+
+ // Tags
+ m.Group("/{username}/{reponame}", func() {
+ m.Group("/tags", func() {
+ m.Get("", repo.TagsList)
+ m.Get("/list", repo.GetTagList)
+ m.Get(".rss", feedEnabled, repo.TagsListFeedRSS)
+ m.Get(".atom", feedEnabled, repo.TagsListFeedAtom)
+ }, ctxDataSet("EnableFeed", setting.Other.EnableFeed),
+ repo.MustBeNotEmpty, reqRepoCodeReader, context.RepoRefByType(context.RepoRefTag, true))
+ m.Post("/tags/delete", repo.DeleteTag, reqSignIn,
+ repo.MustBeNotEmpty, context.RepoMustNotBeArchived(), reqRepoCodeWriter, context.RepoRef())
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes())
+
+ // Releases
+ m.Group("/{username}/{reponame}", func() {
+ m.Group("/releases", func() {
+ m.Get("/", repo.Releases)
+ m.Get("/tag/*", repo.SingleRelease)
+ m.Get("/latest", repo.LatestRelease)
+ m.Get(".rss", feedEnabled, repo.ReleasesFeedRSS)
+ m.Get(".atom", feedEnabled, repo.ReleasesFeedAtom)
+ }, ctxDataSet("EnableFeed", setting.Other.EnableFeed),
+ repo.MustBeNotEmpty, context.RepoRefByType(context.RepoRefTag, true))
+ m.Get("/releases/attachments/{uuid}", repo.MustBeNotEmpty, repo.GetAttachment)
+ m.Get("/releases/download/{vTag}/{fileName}", repo.MustBeNotEmpty, repo.RedirectDownload)
+ m.Group("/releases", func() {
+ m.Combo("/new", context.EnforceQuotaWeb(quota_model.LimitSubjectSizeReposAll, context.QuotaTargetRepo)).
+ Get(repo.NewRelease).
+ Post(web.Bind(forms.NewReleaseForm{}), repo.NewReleasePost)
+ m.Post("/delete", repo.DeleteRelease)
+ m.Post("/attachments", context.EnforceQuotaWeb(quota_model.LimitSubjectSizeAssetsAttachmentsReleases, context.QuotaTargetRepo), repo.UploadReleaseAttachment)
+ m.Post("/attachments/remove", repo.DeleteAttachment)
+ }, reqSignIn, repo.MustBeNotEmpty, context.RepoMustNotBeArchived(), reqRepoReleaseWriter, context.RepoRef())
+ m.Group("/releases", func() {
+ m.Get("/edit/*", repo.EditRelease)
+ m.Post("/edit/*", web.Bind(forms.EditReleaseForm{}), repo.EditReleasePost)
+ }, reqSignIn, repo.MustBeNotEmpty, context.RepoMustNotBeArchived(), reqRepoReleaseWriter, repo.CommitInfoCache, context.EnforceQuotaWeb(quota_model.LimitSubjectSizeReposAll, context.QuotaTargetRepo))
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes(), reqRepoReleaseReader)
+
+ // to maintain compatibility with old attachments
+ m.Group("/{username}/{reponame}", func() {
+ m.Get("/attachments/{uuid}", repo.GetAttachment)
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes())
+
+ m.Group("/{username}/{reponame}", func() {
+ m.Post("/topics", repo.TopicsPost)
+ }, context.RepoAssignment, context.RepoMustNotBeArchived(), reqRepoAdmin)
+
+ m.Group("/{username}/{reponame}", func() {
+ m.Group("", func() {
+ m.Get("/issues/posters", repo.IssuePosters) // it can't use {type:issues|pulls} because other routes like "/pulls/{index}" have higher priority
+ m.Get("/{type:issues|pulls}", repo.Issues)
+ m.Get("/{type:issues|pulls}/{index}", repo.ViewIssue)
+ m.Group("/{type:issues|pulls}/{index}/content-history", func() {
+ m.Get("/overview", repo.GetContentHistoryOverview)
+ m.Get("/list", repo.GetContentHistoryList)
+ m.Get("/detail", repo.GetContentHistoryDetail)
+ })
+ m.Get("/labels", reqRepoIssuesOrPullsReader, repo.RetrieveLabels, repo.Labels)
+ m.Get("/milestones", reqRepoIssuesOrPullsReader, repo.Milestones)
+ }, context.RepoRef())
+
+ if setting.Packages.Enabled {
+ m.Get("/packages", repo.Packages)
+ }
+
+ if setting.Badges.Enabled {
+ m.Group("/badges", func() {
+ m.Get("/workflows/{workflow_name}/badge.svg", badges.GetWorkflowBadge)
+ m.Group("/issues", func() {
+ m.Get(".svg", badges.GetTotalIssuesBadge)
+ m.Get("/open.svg", badges.GetOpenIssuesBadge)
+ m.Get("/closed.svg", badges.GetClosedIssuesBadge)
+ })
+ m.Group("/pulls", func() {
+ m.Get(".svg", badges.GetTotalPullsBadge)
+ m.Get("/open.svg", badges.GetOpenPullsBadge)
+ m.Get("/closed.svg", badges.GetClosedPullsBadge)
+ })
+ if !setting.Repository.DisableStars {
+ m.Get("/stars.svg", badges.GetStarsBadge)
+ }
+ m.Get("/release.svg", badges.GetLatestReleaseBadge)
+ })
+ }
+
+ m.Group("/projects", func() {
+ m.Get("", repo.Projects)
+ m.Get("/{id}", repo.ViewProject)
+ m.Group("", func() { //nolint:dupl
+ m.Get("/new", repo.RenderNewProject)
+ m.Post("/new", web.Bind(forms.CreateProjectForm{}), repo.NewProjectPost)
+ m.Group("/{id}", func() {
+ m.Post("", web.Bind(forms.EditProjectColumnForm{}), repo.AddColumnToProjectPost)
+ m.Post("/move", project.MoveColumns)
+ m.Post("/delete", repo.DeleteProject)
+
+ m.Get("/edit", repo.RenderEditProject)
+ m.Post("/edit", web.Bind(forms.CreateProjectForm{}), repo.EditProjectPost)
+ m.Post("/{action:open|close}", repo.ChangeProjectStatus)
+
+ m.Group("/{columnID}", func() {
+ m.Put("", web.Bind(forms.EditProjectColumnForm{}), repo.EditProjectColumn)
+ m.Delete("", repo.DeleteProjectColumn)
+ m.Post("/default", repo.SetDefaultProjectColumn)
+
+ m.Post("/move", repo.MoveIssues)
+ })
+ })
+ }, reqRepoProjectsWriter, context.RepoMustNotBeArchived())
+ }, reqRepoProjectsReader, repo.MustEnableProjects)
+
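+ // Actions (CI) web UI: run listing, run and job views, log and artifact access, plus
+ // re-run/cancel/approve operations for users with write access to the Actions unit.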
+ m.Group("/actions", func() {
+ m.Get("", actions.List)
+ m.Post("/disable", reqRepoAdmin, actions.DisableWorkflowFile)
+ m.Post("/enable", reqRepoAdmin, actions.EnableWorkflowFile)
+ m.Post("/manual", reqRepoAdmin, actions.ManualRunWorkflow)
+
+ m.Group("/runs", func() {
+ m.Get("/latest", actions.ViewLatest)
+ m.Group("/{run}", func() {
+ m.Combo("").
+ Get(actions.View).
+ Post(web.Bind(actions.ViewRequest{}), actions.ViewPost)
+ m.Group("/jobs/{job}", func() {
+ m.Combo("").
+ Get(actions.View).
+ Post(web.Bind(actions.ViewRequest{}), actions.ViewPost)
+ m.Post("/rerun", reqRepoActionsWriter, actions.Rerun)
+ m.Get("/logs", actions.Logs)
+ })
+ m.Post("/cancel", reqRepoActionsWriter, actions.Cancel)
+ m.Post("/approve", reqRepoActionsWriter, actions.Approve)
+ m.Get("/artifacts", actions.ArtifactsView)
+ m.Get("/artifacts/{artifact_name}", actions.ArtifactsDownloadView)
+ m.Delete("/artifacts/{artifact_name}", reqRepoActionsWriter, actions.ArtifactsDeleteView)
+ m.Post("/rerun", reqRepoActionsWriter, actions.Rerun)
+ })
+ })
+
+ m.Group("/workflows/{workflow_name}", func() {
+ m.Get("/badge.svg", badges.GetWorkflowBadge)
+ m.Get("/runs/latest", actions.ViewLatestWorkflowRun)
+ })
+ }, reqRepoActionsReader, actions.MustEnableActions)
+
+ m.Group("/wiki", func() {
+ m.Combo("/").
+ Get(repo.Wiki).
+ Post(context.RepoMustNotBeArchived(), reqSignIn, reqRepoWikiWriter, web.Bind(forms.NewWikiForm{}), context.EnforceQuotaWeb(quota_model.LimitSubjectSizeWiki, context.QuotaTargetRepo), repo.WikiPost)
+ m.Combo("/*").
+ Get(repo.Wiki).
+ Post(context.RepoMustNotBeArchived(), reqSignIn, reqRepoWikiWriter, web.Bind(forms.NewWikiForm{}), context.EnforceQuotaWeb(quota_model.LimitSubjectSizeWiki, context.QuotaTargetRepo), repo.WikiPost)
+ m.Get("/commit/{sha:[a-f0-9]{4,64}}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.Diff)
+ m.Get("/commit/{sha:[a-f0-9]{4,64}}.{ext:patch|diff}", repo.RawDiff)
+ }, repo.MustEnableWiki, func(ctx *context.Context) {
+ ctx.Data["PageIsWiki"] = true
+ ctx.Data["CloneButtonOriginLink"] = ctx.Repo.Repository.WikiCloneLink()
+ })
+
+ m.Group("/wiki", func() {
+ m.Get("/search", repo.WikiSearchContent)
+ m.Get("/raw/*", repo.WikiRaw)
+ }, repo.MustEnableWiki)
+
+ m.Group("/activity", func() {
+ m.Get("", repo.Activity)
+ m.Get("/{period}", repo.Activity)
+ m.Group("/contributors", func() {
+ m.Get("", repo.Contributors)
+ m.Get("/data", repo.ContributorsData)
+ }, repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
+ m.Group("/code-frequency", func() {
+ m.Get("", repo.CodeFrequency)
+ m.Get("/data", repo.CodeFrequencyData)
+ }, repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
+ m.Group("/recent-commits", func() {
+ m.Get("", repo.RecentCommits)
+ m.Get("/data", repo.RecentCommitsData)
+ }, repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
+ }, context.RepoRef(), context.RequireRepoReaderOr(unit.TypeCode, unit.TypePullRequests, unit.TypeIssues, unit.TypeReleases))
+
+ m.Group("/activity_author_data", func() {
+ m.Get("", repo.ActivityAuthors)
+ m.Get("/{period}", repo.ActivityAuthors)
+ }, context.RepoRef(), repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
+
+ m.Group("/archive", func() {
+ m.Get("/*", repo.Download)
+ m.Post("/*", repo.InitiateDownload)
+ }, repo.MustBeNotEmpty, dlSourceEnabled, reqRepoCodeReader)
+
+ m.Group("/branches", func() {
+ m.Get("/list", repo.GetBranchesList)
+ m.Get("", repo.Branches)
+ }, repo.MustBeNotEmpty, context.RepoRef(), reqRepoCodeReader)
+
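+ // blob excerpts back the "expand lines" controls in diff views; wiki diffs only need the
+ // wiki unit enabled, regular diffs additionally require code read access on a non-empty repo.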
+ m.Group("/blob_excerpt", func() {
+ m.Get("/{sha}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.ExcerptBlob)
+ }, func(ctx *context.Context) gocontext.CancelFunc {
+ if ctx.FormBool("wiki") {
+ ctx.Data["PageIsWiki"] = true
+ repo.MustEnableWiki(ctx)
+ return nil
+ }
+
+ reqRepoCodeReader(ctx)
+ if ctx.Written() {
+ return nil
+ }
+ cancel := context.RepoRef()(ctx)
+ if ctx.Written() {
+ return cancel
+ }
+
+ repo.MustBeNotEmpty(ctx)
+ return cancel
+ })
+
+ m.Get("/pulls/posters", repo.PullPosters)
+ m.Group("/pulls/{index}", func() {
+ m.Get("", repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewIssue)
+ m.Get(".diff", repo.DownloadPullDiff)
+ m.Get(".patch", repo.DownloadPullPatch)
+ m.Group("/commits", func() {
+ m.Get("", context.RepoRef(), repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
+ m.Get("/list", context.RepoRef(), repo.GetPullCommits)
+ m.Get("/{sha:[a-f0-9]{4,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
+ })
+ m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), context.EnforceQuotaWeb(quota_model.LimitSubjectSizeGitAll, context.QuotaTargetRepo), repo.MergePullRequest)
+ m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
+ m.Post("/update", repo.UpdatePullRequest)
+ m.Post("/set_allow_maintainer_edit", web.Bind(forms.UpdateAllowEditsForm{}), repo.SetAllowEdits)
+ m.Post("/cleanup", context.RepoMustNotBeArchived(), context.RepoRef(), repo.CleanUpPullRequest)
+ m.Group("/files", func() {
+ m.Get("", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForAllCommitsOfPr)
+ m.Get("/{sha:[a-f0-9]{4,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesStartingFromCommit)
+ m.Get("/{shaFrom:[a-f0-9]{4,40}}..{shaTo:[a-f0-9]{4,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForRange)
+ m.Group("/reviews", func() {
+ m.Get("/new_comment", repo.RenderNewCodeCommentForm)
+ m.Post("/comments", web.Bind(forms.CodeCommentForm{}), repo.SetShowOutdatedComments, repo.CreateCodeComment)
+ m.Post("/submit", web.Bind(forms.SubmitReviewForm{}), repo.SubmitReview)
+ }, context.RepoMustNotBeArchived())
+ })
+ }, repo.MustAllowPulls)
+
+ m.Group("/media", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.SingleDownloadOrLFS)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.SingleDownloadOrLFS)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.SingleDownloadOrLFS)
+ m.Get("/blob/{sha}", context.RepoRefByType(context.RepoRefBlob), repo.DownloadByIDOrLFS)
+ // "/*" route is deprecated, and kept for backward compatibility
+ m.Get("/*", context.RepoRefByType(context.RepoRefLegacy), repo.SingleDownloadOrLFS)
+ }, repo.MustBeNotEmpty, reqRepoCodeReader)
+
+ m.Group("/raw", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.SingleDownload)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.SingleDownload)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.SingleDownload)
+ m.Get("/blob/{sha}", context.RepoRefByType(context.RepoRefBlob), repo.DownloadByID)
+ // "/*" route is deprecated, and kept for backward compatibility
+ m.Get("/*", context.RepoRefByType(context.RepoRefLegacy), repo.SingleDownload)
+ }, repo.MustBeNotEmpty, reqRepoCodeReader)
+
+ m.Group("/render", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.RenderFile)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.RenderFile)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.RenderFile)
+ m.Get("/blob/{sha}", context.RepoRefByType(context.RepoRefBlob), repo.RenderFile)
+ }, repo.MustBeNotEmpty, reqRepoCodeReader)
+
+ m.Group("/commits", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.RefCommits)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.RefCommits)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.RefCommits)
+ // "/*" route is deprecated, and kept for backward compatibility
+ m.Get("/*", context.RepoRefByType(context.RepoRefLegacy), repo.RefCommits)
+ }, repo.MustBeNotEmpty, reqRepoCodeReader)
+
+ m.Group("/blame", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.RefBlame)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.RefBlame)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.RefBlame)
+ }, repo.MustBeNotEmpty, reqRepoCodeReader)
+
+ m.Group("", func() {
+ m.Get("/graph", repo.Graph)
+ m.Get("/commit/{sha:([a-f0-9]{4,64})$}", repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.Diff)
+ m.Get("/commit/{sha:([a-f0-9]{4,64})$}/load-branches-and-tags", repo.LoadBranchesAndTags)
+ m.Get("/cherry-pick/{sha:([a-f0-9]{4,64})$}", repo.SetEditorconfigIfExists, repo.CherryPick)
+ }, repo.MustBeNotEmpty, context.RepoRef(), reqRepoCodeReader)
+
+ m.Get("/rss/branch/*", repo.MustBeNotEmpty, context.RepoRefByType(context.RepoRefBranch), feedEnabled, feed.RenderBranchFeed("rss"))
+ m.Get("/atom/branch/*", repo.MustBeNotEmpty, context.RepoRefByType(context.RepoRefBranch), feedEnabled, feed.RenderBranchFeed("atom"))
+
+ m.Group("/src", func() {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.Home)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.Home)
+ m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.Home)
+ // "/*" route is deprecated, and kept for backward compatibility
+ m.Get("/*", context.RepoRefByType(context.RepoRefLegacy), repo.Home)
+ }, repo.SetEditorconfigIfExists)
+
+ if !setting.Repository.DisableForks {
+ m.Group("", func() {
+ m.Get("/forks", repo.Forks)
+ }, context.RepoRef(), reqRepoCodeReader)
+ }
+ m.Get("/commit/{sha:([a-f0-9]{4,64})}.{ext:patch|diff}", repo.MustBeNotEmpty, reqRepoCodeReader, repo.RawDiff)
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes())
+
+ m.Post("/{username}/{reponame}/lastcommit/*", ignSignInAndCsrf, context.RepoAssignment, context.UnitTypes(), context.RepoRefByType(context.RepoRefCommit), reqRepoCodeReader, repo.LastCommit)
+
+ m.Group("/{username}/{reponame}", func() {
+ if !setting.Repository.DisableStars {
+ m.Get("/stars", context.RepoRef(), repo.Stars)
+ }
+ m.Get("/watchers", context.RepoRef(), repo.Watchers)
+ m.Group("/search", func() {
+ m.Get("", context.RepoRef(), repo.Search)
+ if !setting.Indexer.RepoIndexerEnabled {
+ m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.Search)
+ m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.Search)
+ }
+ }, reqRepoCodeReader)
+ }, ignSignIn, context.RepoAssignment, context.UnitTypes())
+
+ m.Group("/{username}", func() {
+ m.Group("/{reponame}", func() {
+ m.Get("", repo.SetEditorconfigIfExists, repo.Home)
+ }, ignSignIn, context.RepoAssignment, context.RepoRef(), context.UnitTypes())
+
+ m.Group("/{reponame}", func() {
+ m.Group("/info/lfs", func() {
+ m.Post("/objects/batch", lfs.CheckAcceptMediaType, lfs.BatchHandler)
+ m.Put("/objects/{oid}/{size}", lfs.UploadHandler)
+ m.Get("/objects/{oid}/{filename}", lfs.DownloadHandler)
+ m.Get("/objects/{oid}", lfs.DownloadHandler)
+ m.Post("/verify", lfs.CheckAcceptMediaType, lfs.VerifyHandler)
+ m.Group("/locks", func() {
+ m.Get("/", lfs.GetListLockHandler)
+ m.Post("/", lfs.PostLockHandler)
+ m.Post("/verify", lfs.VerifyLockHandler)
+ m.Post("/{lid}/unlock", lfs.UnLockHandler)
+ }, lfs.CheckAcceptMediaType)
+ m.Any("/*", func(ctx *context.Context) {
+ ctx.NotFound("", nil)
+ })
+ }, ignSignInAndCsrf, lfsServerEnabled)
+
+ gitHTTPRouters(m)
+ })
+ })
+
+ if setting.Repository.EnableFlags {
+ m.Group("/{username}/{reponame}/flags", func() {
+ m.Get("", repo_flags.Manage)
+ m.Post("", repo_flags.ManagePost)
+ }, adminReq, context.RepoAssignment, context.UnitTypes())
+ }
+ // ***** END: Repository *****
+
+ m.Group("/notifications", func() {
+ m.Get("", user.Notifications)
+ m.Get("/subscriptions", user.NotificationSubscriptions)
+ m.Get("/watching", user.NotificationWatching)
+ m.Post("/status", user.NotificationStatusPost)
+ m.Post("/purge", user.NotificationPurgePost)
+ m.Get("/new", user.NewAvailable)
+ }, reqSignIn)
+
+ if setting.API.EnableSwagger {
+ m.Get("/swagger.v1.json", SwaggerV1Json)
+ }
+
+ if !setting.IsProd {
+ m.Any("/devtest", devtest.List)
+ m.Any("/devtest/fetch-action-test", devtest.FetchActionTest)
+ m.Any("/devtest/{sub}", devtest.Tmpl)
+ }
+
+ m.NotFound(func(w http.ResponseWriter, req *http.Request) {
+ ctx := context.GetWebContext(req)
+ ctx.NotFound("", nil)
+ })
+}
diff --git a/routers/web/webfinger.go b/routers/web/webfinger.go
new file mode 100644
index 0000000..1f3de70
--- /dev/null
+++ b/routers/web/webfinger.go
@@ -0,0 +1,167 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package web
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/context"
+)
+
+// https://datatracker.ietf.org/doc/html/draft-ietf-appsawg-webfinger-14#section-4.4
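+//
+// Illustrative example (host and user are hypothetical; the handler is conventionally
+// mounted at /.well-known/webfinger):
+//
+//	GET /.well-known/webfinger?resource=acct:alice@example.com
+//
+// answers with a JRD document shaped like the one built below:
+//
+//	{"subject":"acct:alice@example.com",
+//	 "aliases":["https://example.com/alice", "https://example.com/api/v1/activitypub/user-id/1"],
+//	 "links":[{"rel":"http://webfinger.net/rel/profile-page","type":"text/html","href":"https://example.com/alice"}]}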
+
+type webfingerJRD struct {
+ Subject string `json:"subject,omitempty"`
+ Aliases []string `json:"aliases,omitempty"`
+ Properties map[string]any `json:"properties,omitempty"`
+ Links []*webfingerLink `json:"links,omitempty"`
+}
+
+type webfingerLink struct {
+ Rel string `json:"rel,omitempty"`
+ Type string `json:"type,omitempty"`
+ Href string `json:"href,omitempty"`
+ Titles map[string]string `json:"titles,omitempty"`
+ Properties map[string]any `json:"properties,omitempty"`
+}
+
+// WebfingerQuery returns WebFinger information about the requested resource (a user account on this instance)
+// https://datatracker.ietf.org/doc/html/rfc7565
+func WebfingerQuery(ctx *context.Context) {
+ appURL, _ := url.Parse(setting.AppURL)
+
+ resource, err := url.Parse(ctx.FormTrim("resource"))
+ if err != nil {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ var u *user_model.User
+
+ switch resource.Scheme {
+ case "acct":
+ // allow only the current host
+ parts := strings.SplitN(resource.Opaque, "@", 2)
+ if len(parts) != 2 {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+ if parts[1] != appURL.Host {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ u, err = user_model.GetUserByName(ctx, parts[0])
+ case "mailto":
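+ // look up by e-mail address; users who keep their e-mail private are treated as not found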
+ u, err = user_model.GetUserByEmail(ctx, resource.Opaque)
+ if u != nil && u.KeepEmailPrivate {
+ err = user_model.ErrUserNotExist{}
+ }
+ case "https", "http":
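+ // resource given as a URL on this instance: only user profile URLs resolve,
+ // repository/issue/pull/project URLs are answered with 404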
+ if resource.Host != appURL.Host {
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+
+ p := strings.Trim(resource.Path, "/")
+ if len(p) == 0 {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ parts := strings.Split(p, "/")
+
+ switch len(parts) {
+ case 1: // user
+ u, err = user_model.GetUserByName(ctx, parts[0])
+ case 2: // repository
+ ctx.Error(http.StatusNotFound)
+ return
+
+ case 3:
+ switch parts[2] {
+ case "issues":
+ ctx.Error(http.StatusNotFound)
+ return
+
+ case "pulls":
+ ctx.Error(http.StatusNotFound)
+ return
+
+ case "projects":
+ ctx.Error(http.StatusNotFound)
+ return
+
+ default:
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ default:
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
+ default:
+ ctx.Error(http.StatusBadRequest)
+ return
+ }
+ if err != nil {
+ if user_model.IsErrUserNotExist(err) {
+ ctx.Error(http.StatusNotFound)
+ } else {
+ log.Error("Error getting user: %s Error: %v", resource.Opaque, err)
+ ctx.Error(http.StatusInternalServerError)
+ }
+ return
+ }
+
+ if !user_model.IsUserVisibleToViewer(ctx, u, ctx.Doer) {
+ ctx.Error(http.StatusNotFound)
+ return
+ }
+
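+ // aliases point at the HTML profile page and the ActivityPub actor endpoint, plus a
+ // mailto alias when the e-mail address is public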
+ aliases := []string{
+ u.HTMLURL(),
+ appURL.String() + "api/v1/activitypub/user-id/" + fmt.Sprint(u.ID),
+ }
+ if !u.KeepEmailPrivate {
+ aliases = append(aliases, fmt.Sprintf("mailto:%s", u.Email))
+ }
+
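+ // link relations: profile page, avatar, ActivityPub self document and the OpenID Connect issuer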
+ links := []*webfingerLink{
+ {
+ Rel: "http://webfinger.net/rel/profile-page",
+ Type: "text/html",
+ Href: u.HTMLURL(),
+ },
+ {
+ Rel: "http://webfinger.net/rel/avatar",
+ Href: u.AvatarLink(ctx),
+ },
+ {
+ Rel: "self",
+ Type: "application/activity+json",
+ Href: appURL.String() + "api/v1/activitypub/user-id/" + fmt.Sprint(u.ID),
+ },
+ {
+ Rel: "http://openid.net/specs/connect/1.0/issuer",
+ Href: appURL.String(),
+ },
+ }
+
+ ctx.Resp.Header().Add("Access-Control-Allow-Origin", "*")
+ ctx.JSON(http.StatusOK, &webfingerJRD{
+ Subject: fmt.Sprintf("acct:%s@%s", url.QueryEscape(u.Name), appURL.Host),
+ Aliases: aliases,
+ Links: links,
+ })
+ ctx.Resp.Header().Set("Content-Type", "application/jrd+json")
+}