path: root/modules/setting
Diffstat (limited to '')
-rw-r--r--  modules/setting/actions.go  106
-rw-r--r--  modules/setting/actions_test.go  157
-rw-r--r--  modules/setting/admin.go  32
-rw-r--r--  modules/setting/admin_test.go  33
-rw-r--r--  modules/setting/api.go  40
-rw-r--r--  modules/setting/asset_dynamic.go  8
-rw-r--r--  modules/setting/asset_static.go  8
-rw-r--r--  modules/setting/attachment.go  35
-rw-r--r--  modules/setting/attachment_test.go  134
-rw-r--r--  modules/setting/badges.go  24
-rw-r--r--  modules/setting/cache.go  85
-rw-r--r--  modules/setting/camo.go  32
-rw-r--r--  modules/setting/config.go  98
-rw-r--r--  modules/setting/config/getter.go  49
-rw-r--r--  modules/setting/config/value.go  94
-rw-r--r--  modules/setting/config_env.go  170
-rw-r--r--  modules/setting/config_env_test.go  151
-rw-r--r--  modules/setting/config_provider.go  360
-rw-r--r--  modules/setting/config_provider_test.go  157
-rw-r--r--  modules/setting/cors.go  34
-rw-r--r--  modules/setting/cron.go  32
-rw-r--r--  modules/setting/cron_test.go  44
-rw-r--r--  modules/setting/database.go  204
-rw-r--r--  modules/setting/database_sqlite.go  15
-rw-r--r--  modules/setting/database_test.go  109
-rw-r--r--  modules/setting/f3.go  26
-rw-r--r--  modules/setting/federation.go  51
-rw-r--r--  modules/setting/forgejo_storage_test.go  264
-rw-r--r--  modules/setting/git.go  123
-rw-r--r--  modules/setting/git_test.go  66
-rw-r--r--  modules/setting/highlight.go  17
-rw-r--r--  modules/setting/i18n.go  68
-rw-r--r--  modules/setting/incoming_email.go  89
-rw-r--r--  modules/setting/incoming_email_test.go  74
-rw-r--r--  modules/setting/indexer.go  119
-rw-r--r--  modules/setting/indexer_test.go  71
-rw-r--r--  modules/setting/lfs.go  82
-rw-r--r--  modules/setting/lfs_test.go  102
-rw-r--r--  modules/setting/log.go  270
-rw-r--r--  modules/setting/log_test.go  386
-rw-r--r--  modules/setting/mailer.go  309
-rw-r--r--  modules/setting/mailer_test.go  54
-rw-r--r--  modules/setting/markup.go  192
-rw-r--r--  modules/setting/metrics.go  21
-rw-r--r--  modules/setting/migrations.go  28
-rw-r--r--  modules/setting/mime_type_map.go  28
-rw-r--r--  modules/setting/mirror.go  58
-rw-r--r--  modules/setting/oauth2.go  174
-rw-r--r--  modules/setting/oauth2_test.go  61
-rw-r--r--  modules/setting/other.go  29
-rw-r--r--  modules/setting/packages.go  124
-rw-r--r--  modules/setting/packages_test.go  199
-rw-r--r--  modules/setting/path.go  214
-rw-r--r--  modules/setting/path_test.go  243
-rw-r--r--  modules/setting/picture.go  109
-rw-r--r--  modules/setting/project.go  19
-rw-r--r--  modules/setting/proxy.go  37
-rw-r--r--  modules/setting/queue.go  120
-rw-r--r--  modules/setting/quota.go  26
-rw-r--r--  modules/setting/repository.go  376
-rw-r--r--  modules/setting/repository_archive.go  25
-rw-r--r--  modules/setting/repository_archive_test.go  112
-rw-r--r--  modules/setting/security.go  173
-rw-r--r--  modules/setting/server.go  368
-rw-r--r--  modules/setting/server_test.go  36
-rw-r--r--  modules/setting/service.go  262
-rw-r--r--  modules/setting/service_test.go  133
-rw-r--r--  modules/setting/session.go  78
-rw-r--r--  modules/setting/setting.go  238
-rw-r--r--  modules/setting/setting_test.go  32
-rw-r--r--  modules/setting/ssh.go  197
-rw-r--r--  modules/setting/storage.go  275
-rw-r--r--  modules/setting/storage_test.go  468
-rw-r--r--  modules/setting/task.go  26
-rw-r--r--  modules/setting/time.go  28
-rw-r--r--  modules/setting/ui.go  170
-rw-r--r--  modules/setting/webhook.go  48
77 files changed, 9109 insertions, 0 deletions
diff --git a/modules/setting/actions.go b/modules/setting/actions.go
new file mode 100644
index 0000000..8c1b57b
--- /dev/null
+++ b/modules/setting/actions.go
@@ -0,0 +1,106 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "strings"
+ "time"
+)
+
+// Actions settings
+var (
+ Actions = struct {
+ Enabled bool
+ LogStorage *Storage // how the created logs should be stored
+ LogRetentionDays int64 `ini:"LOG_RETENTION_DAYS"`
+ LogCompression logCompression `ini:"LOG_COMPRESSION"`
+ ArtifactStorage *Storage // how the created artifacts should be stored
+ ArtifactRetentionDays int64 `ini:"ARTIFACT_RETENTION_DAYS"`
+ DefaultActionsURL defaultActionsURL `ini:"DEFAULT_ACTIONS_URL"`
+ ZombieTaskTimeout time.Duration `ini:"ZOMBIE_TASK_TIMEOUT"`
+ EndlessTaskTimeout time.Duration `ini:"ENDLESS_TASK_TIMEOUT"`
+ AbandonedJobTimeout time.Duration `ini:"ABANDONED_JOB_TIMEOUT"`
+ SkipWorkflowStrings []string `ini:"SKIP_WORKFLOW_STRINGS"`
+ LimitDispatchInputs int64 `ini:"LIMIT_DISPATCH_INPUTS"`
+ }{
+ Enabled: true,
+ DefaultActionsURL: defaultActionsURLForgejo,
+ SkipWorkflowStrings: []string{"[skip ci]", "[ci skip]", "[no ci]", "[skip actions]", "[actions skip]"},
+ LimitDispatchInputs: 10,
+ }
+)
+
+type defaultActionsURL string
+
+func (url defaultActionsURL) URL() string {
+ switch url {
+ case defaultActionsURLGitHub:
+ return "https://github.com"
+ case defaultActionsURLSelf:
+ return strings.TrimSuffix(AppURL, "/")
+ default:
+ return string(url)
+ }
+}
+
+const (
+ defaultActionsURLForgejo = "https://code.forgejo.org"
+ defaultActionsURLGitHub = "github" // https://github.com
+ defaultActionsURLSelf = "self" // the root URL of the self-hosted instance
+)
+
+type logCompression string
+
+func (c logCompression) IsValid() bool {
+ return c.IsNone() || c.IsZstd()
+}
+
+func (c logCompression) IsNone() bool {
+ return strings.ToLower(string(c)) == "none"
+}
+
+func (c logCompression) IsZstd() bool {
+ return c == "" || strings.ToLower(string(c)) == "zstd"
+}
+
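+// loadActionsFrom maps the [actions] section onto the Actions struct above and then applies defaults.
+// An illustrative app.ini snippet (example values):
+//
+//	[actions]
+//	ENABLED = true
+//	DEFAULT_ACTIONS_URL = github
+//	LOG_RETENTION_DAYS = 365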
+func loadActionsFrom(rootCfg ConfigProvider) error {
+ sec := rootCfg.Section("actions")
+ err := sec.MapTo(&Actions)
+ if err != nil {
+ return fmt.Errorf("failed to map Actions settings: %v", err)
+ }
+
+ // the log storage does not read its configuration from the [actions] section (hence the nil section)
+ Actions.LogStorage, err = getStorage(rootCfg, "actions_log", "", nil)
+ if err != nil {
+ return err
+ }
+ // default to 1 year
+ if Actions.LogRetentionDays <= 0 {
+ Actions.LogRetentionDays = 365
+ }
+
+ actionsSec, _ := rootCfg.GetSection("actions.artifacts")
+
+ Actions.ArtifactStorage, err = getStorage(rootCfg, "actions_artifacts", "", actionsSec)
+ if err != nil {
+ return err
+ }
+
+ // default to 90 days, matching GitHub Actions
+ if Actions.ArtifactRetentionDays <= 0 {
+ Actions.ArtifactRetentionDays = 90
+ }
+
+ Actions.ZombieTaskTimeout = sec.Key("ZOMBIE_TASK_TIMEOUT").MustDuration(10 * time.Minute)
+ Actions.EndlessTaskTimeout = sec.Key("ENDLESS_TASK_TIMEOUT").MustDuration(3 * time.Hour)
+ Actions.AbandonedJobTimeout = sec.Key("ABANDONED_JOB_TIMEOUT").MustDuration(24 * time.Hour)
+
+ if !Actions.LogCompression.IsValid() {
+ return fmt.Errorf("invalid [actions] LOG_COMPRESSION: %q", Actions.LogCompression)
+ }
+
+ return nil
+}
diff --git a/modules/setting/actions_test.go b/modules/setting/actions_test.go
new file mode 100644
index 0000000..afd76d3
--- /dev/null
+++ b/modules/setting/actions_test.go
@@ -0,0 +1,157 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getStorageInheritNameSectionTypeForActions(t *testing.T) {
+ iniStr := `
+ [storage]
+ STORAGE_TYPE = minio
+ `
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "minio", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log/", Actions.LogStorage.MinioConfig.BasePath)
+ assert.EqualValues(t, "minio", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts/", Actions.ArtifactStorage.MinioConfig.BasePath)
+
+ iniStr = `
+[storage.actions_log]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "minio", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log/", Actions.LogStorage.MinioConfig.BasePath)
+ assert.EqualValues(t, "local", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts", filepath.Base(Actions.ArtifactStorage.Path))
+
+ iniStr = `
+[storage.actions_log]
+STORAGE_TYPE = my_storage
+
+[storage.my_storage]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "minio", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log/", Actions.LogStorage.MinioConfig.BasePath)
+ assert.EqualValues(t, "local", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts", filepath.Base(Actions.ArtifactStorage.Path))
+
+ iniStr = `
+[storage.actions_artifacts]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "local", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log", filepath.Base(Actions.LogStorage.Path))
+ assert.EqualValues(t, "minio", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts/", Actions.ArtifactStorage.MinioConfig.BasePath)
+
+ iniStr = `
+[storage.actions_artifacts]
+STORAGE_TYPE = my_storage
+
+[storage.my_storage]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "local", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log", filepath.Base(Actions.LogStorage.Path))
+ assert.EqualValues(t, "minio", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts/", Actions.ArtifactStorage.MinioConfig.BasePath)
+
+ iniStr = ``
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, "local", Actions.LogStorage.Type)
+ assert.EqualValues(t, "actions_log", filepath.Base(Actions.LogStorage.Path))
+ assert.EqualValues(t, "local", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "actions_artifacts", filepath.Base(Actions.ArtifactStorage.Path))
+}
+
+func Test_getDefaultActionsURLForActions(t *testing.T) {
+ oldActions := Actions
+ oldAppURL := AppURL
+ defer func() {
+ Actions = oldActions
+ AppURL = oldAppURL
+ }()
+
+ AppURL = "http://test_get_default_actions_url_for_actions:3000/"
+
+ tests := []struct {
+ name string
+ iniStr string
+ wantURL string
+ }{
+ {
+ name: "default",
+ iniStr: `
+[actions]
+`,
+ wantURL: "https://code.forgejo.org",
+ },
+ {
+ name: "github",
+ iniStr: `
+[actions]
+DEFAULT_ACTIONS_URL = github
+`,
+ wantURL: "https://github.com",
+ },
+ {
+ name: "self",
+ iniStr: `
+[actions]
+DEFAULT_ACTIONS_URL = self
+`,
+ wantURL: "http://test_get_default_actions_url_for_actions:3000",
+ },
+ {
+ name: "custom urls",
+ iniStr: `
+[actions]
+DEFAULT_ACTIONS_URL = https://example.com
+`,
+ wantURL: "https://example.com",
+ },
+ }
+
+ for _, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(tt.iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadActionsFrom(cfg))
+
+ assert.EqualValues(t, tt.wantURL, Actions.DefaultActionsURL.URL())
+ })
+ }
+}
diff --git a/modules/setting/admin.go b/modules/setting/admin.go
new file mode 100644
index 0000000..eed3aa2
--- /dev/null
+++ b/modules/setting/admin.go
@@ -0,0 +1,32 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/modules/container"
+)
+
+// Admin settings
+var Admin struct {
+ DisableRegularOrgCreation bool
+ DefaultEmailNotification string
+ SendNotificationEmailOnNewUser bool
+ UserDisabledFeatures container.Set[string]
+ ExternalUserDisableFeatures container.Set[string]
+}
+
+func loadAdminFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("admin")
+ Admin.DisableRegularOrgCreation = sec.Key("DISABLE_REGULAR_ORG_CREATION").MustBool(false)
+ Admin.DefaultEmailNotification = sec.Key("DEFAULT_EMAIL_NOTIFICATIONS").MustString("enabled")
+ Admin.SendNotificationEmailOnNewUser = sec.Key("SEND_NOTIFICATION_EMAIL_ON_NEW_USER").MustBool(false)
+ Admin.UserDisabledFeatures = container.SetOf(sec.Key("USER_DISABLED_FEATURES").Strings(",")...)
+ Admin.ExternalUserDisableFeatures = container.SetOf(sec.Key("EXTERNAL_USER_DISABLE_FEATURES").Strings(",")...)
+}
+
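+// Feature names intended for USER_DISABLED_FEATURES and EXTERNAL_USER_DISABLE_FEATURES (comma-separated).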
+const (
+ UserFeatureDeletion = "deletion"
+ UserFeatureManageSSHKeys = "manage_ssh_keys"
+ UserFeatureManageGPGKeys = "manage_gpg_keys"
+)
diff --git a/modules/setting/admin_test.go b/modules/setting/admin_test.go
new file mode 100644
index 0000000..0c6c24b
--- /dev/null
+++ b/modules/setting/admin_test.go
@@ -0,0 +1,33 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/container"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_loadAdminFrom(t *testing.T) {
+ iniStr := `
+ [admin]
+ DISABLE_REGULAR_ORG_CREATION = true
+ DEFAULT_EMAIL_NOTIFICATIONS = z
+ SEND_NOTIFICATION_EMAIL_ON_NEW_USER = true
+ USER_DISABLED_FEATURES = a,b
+ EXTERNAL_USER_DISABLE_FEATURES = x,y
+ `
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ loadAdminFrom(cfg)
+
+ assert.True(t, Admin.DisableRegularOrgCreation)
+ assert.EqualValues(t, "z", Admin.DefaultEmailNotification)
+ assert.True(t, Admin.SendNotificationEmailOnNewUser)
+ assert.EqualValues(t, container.SetOf("a", "b"), Admin.UserDisabledFeatures)
+ assert.EqualValues(t, container.SetOf("x", "y"), Admin.ExternalUserDisableFeatures)
+}
diff --git a/modules/setting/api.go b/modules/setting/api.go
new file mode 100644
index 0000000..c36f05c
--- /dev/null
+++ b/modules/setting/api.go
@@ -0,0 +1,40 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/url"
+ "path"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// API settings
+var API = struct {
+ EnableSwagger bool
+ SwaggerURL string
+ MaxResponseItems int
+ DefaultPagingNum int
+ DefaultGitTreesPerPage int
+ DefaultMaxBlobSize int64
+}{
+ EnableSwagger: true,
+ SwaggerURL: "",
+ MaxResponseItems: 50,
+ DefaultPagingNum: 30,
+ DefaultGitTreesPerPage: 1000,
+ DefaultMaxBlobSize: 10485760,
+}
+
+func loadAPIFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "api", &API)
+
+ defaultAppURL := string(Protocol) + "://" + Domain + ":" + HTTPPort
+ u, err := url.Parse(rootCfg.Section("server").Key("ROOT_URL").MustString(defaultAppURL))
+ if err != nil {
+ log.Fatal("Invalid ROOT_URL '%s': %s", AppURL, err)
+ }
+ u.Path = path.Join(u.Path, "api", "swagger")
+ API.SwaggerURL = u.String()
+}
diff --git a/modules/setting/asset_dynamic.go b/modules/setting/asset_dynamic.go
new file mode 100644
index 0000000..2eb2883
--- /dev/null
+++ b/modules/setting/asset_dynamic.go
@@ -0,0 +1,8 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build !bindata
+
+package setting
+
+const HasBuiltinBindata = false
diff --git a/modules/setting/asset_static.go b/modules/setting/asset_static.go
new file mode 100644
index 0000000..889fca9
--- /dev/null
+++ b/modules/setting/asset_static.go
@@ -0,0 +1,8 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build bindata
+
+package setting
+
+const HasBuiltinBindata = true
diff --git a/modules/setting/attachment.go b/modules/setting/attachment.go
new file mode 100644
index 0000000..4255ac9
--- /dev/null
+++ b/modules/setting/attachment.go
@@ -0,0 +1,35 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Attachment settings
+var Attachment = struct {
+ Storage *Storage
+ AllowedTypes string
+ MaxSize int64
+ MaxFiles int
+ Enabled bool
+}{
+ Storage: &Storage{},
+ AllowedTypes: ".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip",
+ MaxSize: 2048,
+ MaxFiles: 5,
+ Enabled: true,
+}
+
+func loadAttachmentFrom(rootCfg ConfigProvider) (err error) {
+ sec, _ := rootCfg.GetSection("attachment")
+ if sec == nil {
+ Attachment.Storage, err = getStorage(rootCfg, "attachments", "", nil)
+ return err
+ }
+
+ Attachment.AllowedTypes = sec.Key("ALLOWED_TYPES").MustString(".cpuprofile,.csv,.dmp,.docx,.fodg,.fodp,.fods,.fodt,.gif,.gz,.jpeg,.jpg,.json,.jsonc,.log,.md,.mov,.mp4,.odf,.odg,.odp,.ods,.odt,.patch,.pdf,.png,.pptx,.svg,.tgz,.txt,.webm,.webp,.xls,.xlsx,.zip")
+ Attachment.MaxSize = sec.Key("MAX_SIZE").MustInt64(2048)
+ Attachment.MaxFiles = sec.Key("MAX_FILES").MustInt(5)
+ Attachment.Enabled = sec.Key("ENABLED").MustBool(true)
+
+ Attachment.Storage, err = getStorage(rootCfg, "attachments", "", sec)
+ return err
+}
diff --git a/modules/setting/attachment_test.go b/modules/setting/attachment_test.go
new file mode 100644
index 0000000..f8085c1
--- /dev/null
+++ b/modules/setting/attachment_test.go
@@ -0,0 +1,134 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getStorageCustomType(t *testing.T) {
+ iniStr := `
+[attachment]
+STORAGE_TYPE = my_minio
+MINIO_BUCKET = gitea-attachment
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = my_minio:9000
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+
+ assert.EqualValues(t, "minio", Attachment.Storage.Type)
+ assert.EqualValues(t, "my_minio:9000", Attachment.Storage.MinioConfig.Endpoint)
+ assert.EqualValues(t, "gitea-attachment", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageTypeSectionOverridesStorageSection(t *testing.T) {
+ iniStr := `
+[attachment]
+STORAGE_TYPE = minio
+
+[storage.minio]
+MINIO_BUCKET = gitea-minio
+
+[storage]
+MINIO_BUCKET = gitea
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+
+ assert.EqualValues(t, "minio", Attachment.Storage.Type)
+ assert.EqualValues(t, "gitea-minio", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageSpecificOverridesStorage(t *testing.T) {
+ iniStr := `
+[attachment]
+STORAGE_TYPE = minio
+MINIO_BUCKET = gitea-attachment
+
+[storage.attachments]
+MINIO_BUCKET = gitea
+
+[storage]
+STORAGE_TYPE = local
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+
+ assert.EqualValues(t, "minio", Attachment.Storage.Type)
+ assert.EqualValues(t, "gitea-attachment", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageGetDefaults(t *testing.T) {
+ cfg, err := NewConfigProviderFromData("")
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+
+ // default storage is local, so bucket is empty
+ assert.EqualValues(t, "", Attachment.Storage.MinioConfig.Bucket)
+}
+
+func Test_getStorageInheritNameSectionType(t *testing.T) {
+ iniStr := `
+[storage.attachments]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+
+ assert.EqualValues(t, "minio", Attachment.Storage.Type)
+}
+
+func Test_AttachmentStorage(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+ storage := Attachment.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+}
+
+func Test_AttachmentStorage1(t *testing.T) {
+ iniStr := `
+[storage]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+ assert.EqualValues(t, "minio", Attachment.Storage.Type)
+ assert.EqualValues(t, "gitea", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+}
diff --git a/modules/setting/badges.go b/modules/setting/badges.go
new file mode 100644
index 0000000..e0c1cb5
--- /dev/null
+++ b/modules/setting/badges.go
@@ -0,0 +1,24 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "text/template"
+)
+
+// Badges settings
+var Badges = struct {
+ Enabled bool `ini:"ENABLED"`
+ GeneratorURLTemplate string `ini:"GENERATOR_URL_TEMPLATE"`
+ GeneratorURLTemplateTemplate *template.Template `ini:"-"`
+}{
+ Enabled: true,
+ GeneratorURLTemplate: "https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}",
+}
+
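+// loadBadgesFrom maps the [badges] section and pre-parses GENERATOR_URL_TEMPLATE as a text/template
+// (the default template above uses {{.label}}, {{.text}} and {{.color}}).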
+func loadBadgesFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "badges", &Badges)
+
+ Badges.GeneratorURLTemplateTemplate = template.Must(template.New("").Parse(Badges.GeneratorURLTemplate))
+}
diff --git a/modules/setting/cache.go b/modules/setting/cache.go
new file mode 100644
index 0000000..bfa6ca0
--- /dev/null
+++ b/modules/setting/cache.go
@@ -0,0 +1,85 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Cache represents cache settings
+type Cache struct {
+ Adapter string
+ Interval int
+ Conn string
+ TTL time.Duration `ini:"ITEM_TTL"`
+}
+
+// CacheService holds the global cache settings
+var CacheService = struct {
+ Cache `ini:"cache"`
+
+ LastCommit struct {
+ TTL time.Duration `ini:"ITEM_TTL"`
+ CommitsCount int64
+ } `ini:"cache.last_commit"`
+}{
+ Cache: Cache{
+ Adapter: "memory",
+ Interval: 60,
+ TTL: 16 * time.Hour,
+ },
+ LastCommit: struct {
+ TTL time.Duration `ini:"ITEM_TTL"`
+ CommitsCount int64
+ }{
+ TTL: 8760 * time.Hour,
+ CommitsCount: 1000,
+ },
+}
+
+// MemcacheMaxTTL represents the maximum memcache TTL
+const MemcacheMaxTTL = 30 * 24 * time.Hour
+
+func loadCacheFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("cache")
+ if err := sec.MapTo(&CacheService); err != nil {
+ log.Fatal("Failed to map Cache settings: %v", err)
+ }
+
+ CacheService.Adapter = sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache", "twoqueue"})
+ switch CacheService.Adapter {
+ case "memory":
+ case "redis", "memcache":
+ CacheService.Conn = strings.Trim(sec.Key("HOST").String(), "\" ")
+ case "twoqueue":
+ CacheService.Conn = strings.TrimSpace(sec.Key("HOST").String())
+ if CacheService.Conn == "" {
+ CacheService.Conn = "50000"
+ }
+ default:
+ log.Fatal("Unknown cache adapter: %s", CacheService.Adapter)
+ }
+
+ sec = rootCfg.Section("cache.last_commit")
+ CacheService.LastCommit.CommitsCount = sec.Key("COMMITS_COUNT").MustInt64(1000)
+}
+
+// TTLSeconds returns the TTLSeconds or unix timestamp for memcache
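+// (memcached treats expiration values longer than 30 days as absolute unix timestamps, hence the conversion)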
+func (c Cache) TTLSeconds() int64 {
+ if c.Adapter == "memcache" && c.TTL > MemcacheMaxTTL {
+ return time.Now().Add(c.TTL).Unix()
+ }
+ return int64(c.TTL.Seconds())
+}
+
+// LastCommitCacheTTLSeconds returns the TTLSeconds or unix timestamp for memcache
+func LastCommitCacheTTLSeconds() int64 {
+ if CacheService.Adapter == "memcache" && CacheService.LastCommit.TTL > MemcacheMaxTTL {
+ return time.Now().Add(CacheService.LastCommit.TTL).Unix()
+ }
+ return int64(CacheService.LastCommit.TTL.Seconds())
+}
diff --git a/modules/setting/camo.go b/modules/setting/camo.go
new file mode 100644
index 0000000..608ecf8
--- /dev/null
+++ b/modules/setting/camo.go
@@ -0,0 +1,32 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "strconv"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+var Camo = struct {
+ Enabled bool
+ ServerURL string `ini:"SERVER_URL"`
+ HMACKey string `ini:"HMAC_KEY"`
+ Always bool
+}{}
+
+func loadCamoFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "camo", &Camo)
+ if Camo.Enabled {
+ oldValue := rootCfg.Section("camo").Key("ALLWAYS").MustString("")
+ if oldValue != "" {
+ log.Warn("camo.ALLWAYS is deprecated, use camo.ALWAYS instead")
+ Camo.Always, _ = strconv.ParseBool(oldValue)
+ }
+
+ if Camo.ServerURL == "" || Camo.HMACKey == "" {
+ log.Fatal(`Camo settings require "SERVER_URL" and "HMAC_KEY"`)
+ }
+ }
+}
diff --git a/modules/setting/config.go b/modules/setting/config.go
new file mode 100644
index 0000000..0355857
--- /dev/null
+++ b/modules/setting/config.go
@@ -0,0 +1,98 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "sync"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting/config"
+)
+
+type PictureStruct struct {
+ DisableGravatar *config.Value[bool]
+ EnableFederatedAvatar *config.Value[bool]
+}
+
+type OpenWithEditorApp struct {
+ DisplayName string
+ OpenURL string
+}
+
+type OpenWithEditorAppsType []OpenWithEditorApp
+
+func (t OpenWithEditorAppsType) ToTextareaString() string {
+ ret := ""
+ for _, app := range t {
+ ret += app.DisplayName + " = " + app.OpenURL + "\n"
+ }
+ return ret
+}
+
+func DefaultOpenWithEditorApps() OpenWithEditorAppsType {
+ return OpenWithEditorAppsType{
+ {
+ DisplayName: "VS Code",
+ OpenURL: "vscode://vscode.git/clone?url={url}",
+ },
+ {
+ DisplayName: "VSCodium",
+ OpenURL: "vscodium://vscode.git/clone?url={url}",
+ },
+ {
+ DisplayName: "Intellij IDEA",
+ OpenURL: "jetbrains://idea/checkout/git?idea.required.plugins.id=Git4Idea&checkout.repo={url}",
+ },
+ }
+}
+
+type RepositoryStruct struct {
+ OpenWithEditorApps *config.Value[OpenWithEditorAppsType]
+}
+
+type ConfigStruct struct {
+ Picture *PictureStruct
+ Repository *RepositoryStruct
+}
+
+var (
+ defaultConfig *ConfigStruct
+ defaultConfigOnce sync.Once
+)
+
+func initDefaultConfig() {
+ config.SetCfgSecKeyGetter(&cfgSecKeyGetter{})
+ defaultConfig = &ConfigStruct{
+ Picture: &PictureStruct{
+ DisableGravatar: config.ValueJSON[bool]("picture.disable_gravatar").WithFileConfig(config.CfgSecKey{Sec: "picture", Key: "DISABLE_GRAVATAR"}),
+ EnableFederatedAvatar: config.ValueJSON[bool]("picture.enable_federated_avatar").WithFileConfig(config.CfgSecKey{Sec: "picture", Key: "ENABLE_FEDERATED_AVATAR"}),
+ },
+ Repository: &RepositoryStruct{
+ OpenWithEditorApps: config.ValueJSON[OpenWithEditorAppsType]("repository.open-with.editor-apps"),
+ },
+ }
+}
+
+func Config() *ConfigStruct {
+ defaultConfigOnce.Do(initDefaultConfig)
+ return defaultConfig
+}
+
+type cfgSecKeyGetter struct{}
+
+func (c cfgSecKeyGetter) GetValue(sec, key string) (v string, has bool) {
+ if key == "" {
+ return "", false
+ }
+ cfgSec, err := CfgProvider.GetSection(sec)
+ if err != nil {
+ log.Error("Unable to get config section: %q", sec)
+ return "", false
+ }
+ cfgKey := ConfigSectionKey(cfgSec, key)
+ if cfgKey == nil {
+ return "", false
+ }
+ return cfgKey.Value(), true
+}
diff --git a/modules/setting/config/getter.go b/modules/setting/config/getter.go
new file mode 100644
index 0000000..99f9a47
--- /dev/null
+++ b/modules/setting/config/getter.go
@@ -0,0 +1,49 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package config
+
+import (
+ "context"
+ "sync"
+)
+
+var getterMu sync.RWMutex
+
+type CfgSecKeyGetter interface {
+ GetValue(sec, key string) (v string, has bool)
+}
+
+var cfgSecKeyGetterInternal CfgSecKeyGetter
+
+func SetCfgSecKeyGetter(p CfgSecKeyGetter) {
+ getterMu.Lock()
+ cfgSecKeyGetterInternal = p
+ getterMu.Unlock()
+}
+
+func GetCfgSecKeyGetter() CfgSecKeyGetter {
+ getterMu.RLock()
+ defer getterMu.RUnlock()
+ return cfgSecKeyGetterInternal
+}
+
+type DynKeyGetter interface {
+ GetValue(ctx context.Context, key string) (v string, has bool)
+ GetRevision(ctx context.Context) int
+ InvalidateCache()
+}
+
+var dynKeyGetterInternal DynKeyGetter
+
+func SetDynGetter(p DynKeyGetter) {
+ getterMu.Lock()
+ dynKeyGetterInternal = p
+ getterMu.Unlock()
+}
+
+func GetDynGetter() DynKeyGetter {
+ getterMu.RLock()
+ defer getterMu.RUnlock()
+ return dynKeyGetterInternal
+}
diff --git a/modules/setting/config/value.go b/modules/setting/config/value.go
new file mode 100644
index 0000000..f0ec120
--- /dev/null
+++ b/modules/setting/config/value.go
@@ -0,0 +1,94 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package config
+
+import (
+ "context"
+ "sync"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type CfgSecKey struct {
+ Sec, Key string
+}
+
+type Value[T any] struct {
+ mu sync.RWMutex
+
+ cfgSecKey CfgSecKey
+ dynKey string
+
+ def, value T
+ revision int
+}
+
+func (value *Value[T]) parse(key, valStr string) (v T) {
+ v = value.def
+ if valStr != "" {
+ if err := json.Unmarshal(util.UnsafeStringToBytes(valStr), &v); err != nil {
+ log.Error("Unable to unmarshal json config for key %q, err: %v", key, err)
+ }
+ }
+ return v
+}
+
+func (value *Value[T]) Value(ctx context.Context) (v T) {
+ dg := GetDynGetter()
+ if dg == nil {
+ // this is an edge case: the database is not initialized but the system setting is about to be used
+ // it should panic to avoid inconsistent config values (from config / system setting) and to force the offending code to be fixed
+ panic("no config dyn value getter")
+ }
+
+ rev := dg.GetRevision(ctx)
+
+ // if the revision in database doesn't change, use the last value
+ value.mu.RLock()
+ if rev == value.revision {
+ v = value.value
+ value.mu.RUnlock()
+ return v
+ }
+ value.mu.RUnlock()
+
+ // try to parse the config and cache it
+ var valStr *string
+ if dynVal, has := dg.GetValue(ctx, value.dynKey); has {
+ valStr = &dynVal
+ } else if cfgVal, has := GetCfgSecKeyGetter().GetValue(value.cfgSecKey.Sec, value.cfgSecKey.Key); has {
+ valStr = &cfgVal
+ }
+ if valStr == nil {
+ v = value.def
+ } else {
+ v = value.parse(value.dynKey, *valStr)
+ }
+
+ value.mu.Lock()
+ value.value = v
+ value.revision = rev
+ value.mu.Unlock()
+ return v
+}
+
+func (value *Value[T]) DynKey() string {
+ return value.dynKey
+}
+
+func (value *Value[T]) WithDefault(def T) *Value[T] {
+ value.def = def
+ return value
+}
+
+func (value *Value[T]) WithFileConfig(cfgSecKey CfgSecKey) *Value[T] {
+ value.cfgSecKey = cfgSecKey
+ return value
+}
+
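+// ValueJSON creates a dynamic config value stored as JSON under dynKey,
+// e.g. ValueJSON[bool]("picture.disable_gravatar") as used by setting.Config().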
+func ValueJSON[T any](dynKey string) *Value[T] {
+ return &Value[T]{dynKey: dynKey}
+}
diff --git a/modules/setting/config_env.go b/modules/setting/config_env.go
new file mode 100644
index 0000000..fa0100d
--- /dev/null
+++ b/modules/setting/config_env.go
@@ -0,0 +1,170 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "bytes"
+ "os"
+ "regexp"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+const (
+ EnvConfigKeyPrefixGitea = "^(FORGEJO|GITEA)__"
+ EnvConfigKeySuffixFile = "__FILE"
+)
+
+const escapeRegexpString = "_0[xX](([0-9a-fA-F][0-9a-fA-F])+)_"
+
+var escapeRegex = regexp.MustCompile(escapeRegexpString)
+
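+// CollectEnvConfigKeys returns the names of all environment variables matching the FORGEJO__*/GITEA__* config prefix.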
+func CollectEnvConfigKeys() (keys []string) {
+ // EnvConfigKeyPrefixGitea is a regular expression ("^(FORGEJO|GITEA)__"), so it must be matched, not used as a literal prefix
+ prefixRegexp := regexp.MustCompile(EnvConfigKeyPrefixGitea)
+ for _, env := range os.Environ() {
+ if prefixRegexp.MatchString(env) {
+ k, _, _ := strings.Cut(env, "=")
+ keys = append(keys, k)
+ }
+ }
+ return keys
+}
+
+func ClearEnvConfigKeys() {
+ for _, k := range CollectEnvConfigKeys() {
+ _ = os.Unsetenv(k)
+ }
+}
+
+// decodeEnvSectionKey will decode a portable string encoded Section__Key pair
+// Portable strings are considered to be of the form [A-Z0-9_]*
+// We will encode a disallowed value as the UTF8 byte string preceded by _0X and
+// followed by _. E.g. _0X2D_ for a '-' and _0X2E_ for '.'
+// Section and Key are separated by a plain '__'.
+// The entire section can be encoded as a UTF8 byte string
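+// For example, "LOG_0x2E_CONSOLE__STDERR" decodes to section "log.console" and key "STDERR".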
+func decodeEnvSectionKey(encoded string) (ok bool, section, key string) {
+ inKey := false
+ last := 0
+ escapeStringIndices := escapeRegex.FindAllStringIndex(encoded, -1)
+ for _, unescapeIdx := range escapeStringIndices {
+ preceding := encoded[last:unescapeIdx[0]]
+ if !inKey {
+ if splitter := strings.Index(preceding, "__"); splitter > -1 {
+ section += preceding[:splitter]
+ inKey = true
+ key += preceding[splitter+2:]
+ } else {
+ section += preceding
+ }
+ } else {
+ key += preceding
+ }
+ toDecode := encoded[unescapeIdx[0]+3 : unescapeIdx[1]-1]
+ decodedBytes := make([]byte, len(toDecode)/2)
+ for i := 0; i < len(toDecode)/2; i++ {
+ // Can ignore error here as we know these should be hexadecimal from the regexp
+ byteInt, _ := strconv.ParseInt(toDecode[2*i:2*i+2], 16, 0)
+ decodedBytes[i] = byte(byteInt)
+ }
+ if inKey {
+ key += string(decodedBytes)
+ } else {
+ section += string(decodedBytes)
+ }
+ last = unescapeIdx[1]
+ }
+ remaining := encoded[last:]
+ if !inKey {
+ if splitter := strings.Index(remaining, "__"); splitter > -1 {
+ section += remaining[:splitter]
+ key += remaining[splitter+2:]
+ } else {
+ section += remaining
+ }
+ } else {
+ key += remaining
+ }
+ section = strings.ToLower(section)
+ ok = key != ""
+ if !ok {
+ section = ""
+ key = ""
+ }
+ return ok, section, key
+}
+
+// decodeEnvironmentKey decodes the environment key into a section and key
+// The environment key is in the form of GITEA__SECTION__KEY or GITEA__SECTION__KEY__FILE
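+// With the "__FILE" suffix, the caller treats the environment value as a file path and reads the config value from that file.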
+func decodeEnvironmentKey(prefixRegexp *regexp.Regexp, suffixFile, envKey string) (ok bool, section, key string, useFileValue bool) { //nolint:unparam
+ if strings.HasSuffix(envKey, suffixFile) {
+ useFileValue = true
+ envKey = envKey[:len(envKey)-len(suffixFile)]
+ }
+ loc := prefixRegexp.FindStringIndex(envKey)
+ if loc == nil {
+ return false, "", "", false
+ }
+ ok, section, key = decodeEnvSectionKey(envKey[loc[1]:])
+ return ok, section, key, useFileValue
+}
+
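+// EnvironmentToConfig applies variables such as GITEA__SECTION__KEY=value (or FORGEJO__...) from envs to cfg and reports whether any config value changed.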
+func EnvironmentToConfig(cfg ConfigProvider, envs []string) (changed bool) {
+ prefixRegexp := regexp.MustCompile(EnvConfigKeyPrefixGitea)
+ for _, kv := range envs {
+ idx := strings.IndexByte(kv, '=')
+ if idx < 0 {
+ continue
+ }
+
+ // parse the environment variable to config section name and key name
+ envKey := kv[:idx]
+ envValue := kv[idx+1:]
+ ok, sectionName, keyName, useFileValue := decodeEnvironmentKey(prefixRegexp, EnvConfigKeySuffixFile, envKey)
+ if !ok {
+ continue
+ }
+
+ // use environment value as config value, or read the file content as value if the key indicates a file
+ keyValue := envValue
+ if useFileValue {
+ fileContent, err := os.ReadFile(envValue)
+ if err != nil {
+ log.Error("Error reading file for %s : %v", envKey, envValue, err)
+ continue
+ }
+ if bytes.HasSuffix(fileContent, []byte("\r\n")) {
+ fileContent = fileContent[:len(fileContent)-2]
+ } else if bytes.HasSuffix(fileContent, []byte("\n")) {
+ fileContent = fileContent[:len(fileContent)-1]
+ }
+ keyValue = string(fileContent)
+ }
+
+ // try to set the config value if necessary
+ section, err := cfg.GetSection(sectionName)
+ if err != nil {
+ section, err = cfg.NewSection(sectionName)
+ if err != nil {
+ log.Error("Error creating section: %s : %v", sectionName, err)
+ continue
+ }
+ }
+ key := ConfigSectionKey(section, keyName)
+ if key == nil {
+ changed = true
+ key, err = section.NewKey(keyName, keyValue)
+ if err != nil {
+ log.Error("Error creating key: %s in section: %s with value: %s : %v", keyName, sectionName, keyValue, err)
+ continue
+ }
+ }
+ oldValue := key.Value()
+ if !changed && oldValue != keyValue {
+ changed = true
+ }
+ key.SetValue(keyValue)
+ }
+ return changed
+}
diff --git a/modules/setting/config_env_test.go b/modules/setting/config_env_test.go
new file mode 100644
index 0000000..bec3e58
--- /dev/null
+++ b/modules/setting/config_env_test.go
@@ -0,0 +1,151 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os"
+ "regexp"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestDecodeEnvSectionKey(t *testing.T) {
+ ok, section, key := decodeEnvSectionKey("SEC__KEY")
+ assert.True(t, ok)
+ assert.Equal(t, "sec", section)
+ assert.Equal(t, "KEY", key)
+
+ ok, section, key = decodeEnvSectionKey("sec__key")
+ assert.True(t, ok)
+ assert.Equal(t, "sec", section)
+ assert.Equal(t, "key", key)
+
+ ok, section, key = decodeEnvSectionKey("LOG_0x2E_CONSOLE__STDERR")
+ assert.True(t, ok)
+ assert.Equal(t, "log.console", section)
+ assert.Equal(t, "STDERR", key)
+
+ ok, section, key = decodeEnvSectionKey("SEC")
+ assert.False(t, ok)
+ assert.Equal(t, "", section)
+ assert.Equal(t, "", key)
+}
+
+func TestDecodeEnvironmentKey(t *testing.T) {
+ prefix := regexp.MustCompile(EnvConfigKeyPrefixGitea)
+ suffix := "__FILE"
+
+ ok, section, key, file := decodeEnvironmentKey(prefix, suffix, "SEC__KEY")
+ assert.False(t, ok)
+ assert.Equal(t, "", section)
+ assert.Equal(t, "", key)
+ assert.False(t, file)
+
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "GITEA__SEC")
+ assert.False(t, ok)
+ assert.Equal(t, "", section)
+ assert.Equal(t, "", key)
+ assert.False(t, file)
+
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "GITEA____KEY")
+ assert.True(t, ok)
+ assert.Equal(t, "", section)
+ assert.Equal(t, "KEY", key)
+ assert.False(t, file)
+
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "GITEA__SEC__KEY")
+ assert.True(t, ok)
+ assert.Equal(t, "sec", section)
+ assert.Equal(t, "KEY", key)
+ assert.False(t, file)
+
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "FORGEJO__SEC__KEY")
+ assert.True(t, ok)
+ assert.Equal(t, "sec", section)
+ assert.Equal(t, "KEY", key)
+ assert.False(t, file)
+
+ // with "__FILE" suffix, it doesn't support to write "[sec].FILE" to config (no such key FILE is used in Gitea)
+ // but it could be fixed in the future by adding a new suffix like "__VALUE" (no such key VALUE is used in Gitea either)
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "GITEA__SEC__FILE")
+ assert.False(t, ok)
+ assert.Equal(t, "", section)
+ assert.Equal(t, "", key)
+ assert.True(t, file)
+
+ ok, section, key, file = decodeEnvironmentKey(prefix, suffix, "GITEA__SEC__KEY__FILE")
+ assert.True(t, ok)
+ assert.Equal(t, "sec", section)
+ assert.Equal(t, "KEY", key)
+ assert.True(t, file)
+}
+
+func TestEnvironmentToConfig(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData("")
+
+ changed := EnvironmentToConfig(cfg, nil)
+ assert.False(t, changed)
+
+ cfg, err := NewConfigProviderFromData(`
+[sec]
+key = old
+`)
+ require.NoError(t, err)
+
+ changed = EnvironmentToConfig(cfg, []string{"GITEA__sec__key=new"})
+ assert.True(t, changed)
+ assert.Equal(t, "new", cfg.Section("sec").Key("key").String())
+
+ changed = EnvironmentToConfig(cfg, []string{"GITEA__sec__key=new"})
+ assert.False(t, changed)
+
+ tmpFile := t.TempDir() + "/the-file"
+ _ = os.WriteFile(tmpFile, []byte("value-from-file"), 0o644)
+ changed = EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+ assert.True(t, changed)
+ assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+ cfg, _ = NewConfigProviderFromData("")
+ _ = os.WriteFile(tmpFile, []byte("value-from-file\n"), 0o644)
+ EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+ assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+ cfg, _ = NewConfigProviderFromData("")
+ _ = os.WriteFile(tmpFile, []byte("value-from-file\r\n"), 0o644)
+ EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+ assert.Equal(t, "value-from-file", cfg.Section("sec").Key("key").String())
+
+ cfg, _ = NewConfigProviderFromData("")
+ _ = os.WriteFile(tmpFile, []byte("value-from-file\n\n"), 0o644)
+ EnvironmentToConfig(cfg, []string{"GITEA__sec__key__FILE=" + tmpFile})
+ assert.Equal(t, "value-from-file\n", cfg.Section("sec").Key("key").String())
+}
+
+func TestEnvironmentToConfigSubSecKey(t *testing.T) {
+ // the INI package has a quirk: by default, the keys are inherited.
+ // when maintaining the keys, the newly added sub key should not be affected by the parent key.
+ cfg, err := NewConfigProviderFromData(`
+[sec]
+key = some
+`)
+ require.NoError(t, err)
+
+ changed := EnvironmentToConfig(cfg, []string{"GITEA__sec_0X2E_sub__key=some"})
+ assert.True(t, changed)
+
+ tmpFile := t.TempDir() + "/test-sub-sec-key.ini"
+ defer os.Remove(tmpFile)
+ err = cfg.SaveTo(tmpFile)
+ require.NoError(t, err)
+ bs, err := os.ReadFile(tmpFile)
+ require.NoError(t, err)
+ assert.Equal(t, `[sec]
+key = some
+
+[sec.sub]
+key = some
+`, string(bs))
+}
diff --git a/modules/setting/config_provider.go b/modules/setting/config_provider.go
new file mode 100644
index 0000000..12cf36a
--- /dev/null
+++ b/modules/setting/config_provider.go
@@ -0,0 +1,360 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+
+ "gopkg.in/ini.v1" //nolint:depguard
+)
+
+type ConfigKey interface {
+ Name() string
+ Value() string
+ SetValue(v string)
+
+ In(defaultVal string, candidates []string) string
+ String() string
+ Strings(delim string) []string
+
+ MustString(defaultVal string) string
+ MustBool(defaultVal ...bool) bool
+ MustInt(defaultVal ...int) int
+ MustInt64(defaultVal ...int64) int64
+ MustDuration(defaultVal ...time.Duration) time.Duration
+}
+
+type ConfigSection interface {
+ Name() string
+ MapTo(any) error
+ HasKey(key string) bool
+ NewKey(name, value string) (ConfigKey, error)
+ Key(key string) ConfigKey
+ Keys() []ConfigKey
+ ChildSections() []ConfigSection
+}
+
+// ConfigProvider represents a config provider
+type ConfigProvider interface {
+ Section(section string) ConfigSection
+ Sections() []ConfigSection
+ NewSection(name string) (ConfigSection, error)
+ GetSection(name string) (ConfigSection, error)
+ Save() error
+ SaveTo(filename string) error
+
+ GetFile() string
+ DisableSaving()
+ PrepareSaving() (ConfigProvider, error)
+ IsLoadedFromEmpty() bool
+}
+
+type iniConfigProvider struct {
+ file string
+ ini *ini.File
+
+ disableSaving bool // disable the "Save" method because the config options could be polluted
+ loadedFromEmpty bool // whether the file has not existed previously
+}
+
+type iniConfigSection struct {
+ sec *ini.Section
+}
+
+var (
+ _ ConfigProvider = (*iniConfigProvider)(nil)
+ _ ConfigSection = (*iniConfigSection)(nil)
+ _ ConfigKey = (*ini.Key)(nil)
+)
+
+// ConfigSectionKey only searches the keys in the given section, but it is O(n).
+// ini package has a special behavior: with "[sec] a=1" and an empty "[sec.sub]",
+// then in "[sec.sub]", Key()/HasKey() can always see "a=1" because it always tries parent sections.
+// It returns nil if the key doesn't exist.
+func ConfigSectionKey(sec ConfigSection, key string) ConfigKey {
+ if sec == nil {
+ return nil
+ }
+ for _, k := range sec.Keys() {
+ if k.Name() == key {
+ return k
+ }
+ }
+ return nil
+}
+
+func ConfigSectionKeyString(sec ConfigSection, key string, def ...string) string {
+ k := ConfigSectionKey(sec, key)
+ if k != nil && k.String() != "" {
+ return k.String()
+ }
+ if len(def) > 0 {
+ return def[0]
+ }
+ return ""
+}
+
+func ConfigSectionKeyBool(sec ConfigSection, key string, def ...bool) bool {
+ k := ConfigSectionKey(sec, key)
+ if k != nil && k.String() != "" {
+ b, _ := strconv.ParseBool(k.String())
+ return b
+ }
+ if len(def) > 0 {
+ return def[0]
+ }
+ return false
+}
+
+// ConfigInheritedKey works like ini.Section.Key(), but it always returns a new key instance; it is O(n) because NewKey is O(n).
+// The returned key is safe to use with "MustXxx" because it doesn't change the parent section's values;
+// otherwise, ini.Section.Key().MustXxx would pollute the parent section's keys.
+// It never returns nil.
+func ConfigInheritedKey(sec ConfigSection, key string) ConfigKey {
+ k := sec.Key(key)
+ if k != nil && k.String() != "" {
+ newKey, _ := sec.NewKey(k.Name(), k.String())
+ return newKey
+ }
+ newKey, _ := sec.NewKey(key, "")
+ return newKey
+}
+
+func ConfigInheritedKeyString(sec ConfigSection, key string, def ...string) string {
+ k := sec.Key(key)
+ if k != nil && k.String() != "" {
+ return k.String()
+ }
+ if len(def) > 0 {
+ return def[0]
+ }
+ return ""
+}
+
+func (s *iniConfigSection) Name() string {
+ return s.sec.Name()
+}
+
+func (s *iniConfigSection) MapTo(v any) error {
+ return s.sec.MapTo(v)
+}
+
+func (s *iniConfigSection) HasKey(key string) bool {
+ return s.sec.HasKey(key)
+}
+
+func (s *iniConfigSection) NewKey(name, value string) (ConfigKey, error) {
+ return s.sec.NewKey(name, value)
+}
+
+func (s *iniConfigSection) Key(key string) ConfigKey {
+ return s.sec.Key(key)
+}
+
+func (s *iniConfigSection) Keys() (keys []ConfigKey) {
+ for _, k := range s.sec.Keys() {
+ keys = append(keys, k)
+ }
+ return keys
+}
+
+func (s *iniConfigSection) ChildSections() (sections []ConfigSection) {
+ for _, s := range s.sec.ChildSections() {
+ sections = append(sections, &iniConfigSection{s})
+ }
+ return sections
+}
+
+func configProviderLoadOptions() ini.LoadOptions {
+ return ini.LoadOptions{
+ KeyValueDelimiterOnWrite: " = ",
+ IgnoreContinuation: true,
+ }
+}
+
+// NewConfigProviderFromData creates a config provider from the given content; it is mainly for testing purposes.
+func NewConfigProviderFromData(configContent string) (ConfigProvider, error) {
+ cfg, err := ini.LoadSources(configProviderLoadOptions(), strings.NewReader(configContent))
+ if err != nil {
+ return nil, err
+ }
+ cfg.NameMapper = ini.SnackCase
+ return &iniConfigProvider{
+ ini: cfg,
+ loadedFromEmpty: true,
+ }, nil
+}
+
+// NewConfigProviderFromFile loads configuration from a file.
+// NOTE: do not print any logs except errors.
+func NewConfigProviderFromFile(file string) (ConfigProvider, error) {
+ cfg := ini.Empty(configProviderLoadOptions())
+ loadedFromEmpty := true
+
+ if file != "" {
+ isFile, err := util.IsFile(file)
+ if err != nil {
+ return nil, fmt.Errorf("unable to check if %q is a file. Error: %v", file, err)
+ }
+ if isFile {
+ if err = cfg.Append(file); err != nil {
+ return nil, fmt.Errorf("failed to load config file %q: %v", file, err)
+ }
+ loadedFromEmpty = false
+ }
+ }
+
+ cfg.NameMapper = ini.SnackCase
+ return &iniConfigProvider{
+ file: file,
+ ini: cfg,
+ loadedFromEmpty: loadedFromEmpty,
+ }, nil
+}
+
+func (p *iniConfigProvider) Section(section string) ConfigSection {
+ return &iniConfigSection{sec: p.ini.Section(section)}
+}
+
+func (p *iniConfigProvider) Sections() (sections []ConfigSection) {
+ for _, s := range p.ini.Sections() {
+ sections = append(sections, &iniConfigSection{s})
+ }
+ return sections
+}
+
+func (p *iniConfigProvider) NewSection(name string) (ConfigSection, error) {
+ sec, err := p.ini.NewSection(name)
+ if err != nil {
+ return nil, err
+ }
+ return &iniConfigSection{sec: sec}, nil
+}
+
+func (p *iniConfigProvider) GetSection(name string) (ConfigSection, error) {
+ sec, err := p.ini.GetSection(name)
+ if err != nil {
+ return nil, err
+ }
+ return &iniConfigSection{sec: sec}, nil
+}
+
+var errDisableSaving = errors.New("this config can't be saved, developers should prepare a new config to save")
+
+func (p *iniConfigProvider) GetFile() string {
+ return p.file
+}
+
+// Save saves the content into file
+func (p *iniConfigProvider) Save() error {
+ if p.disableSaving {
+ return errDisableSaving
+ }
+ filename := p.file
+ if filename == "" {
+ return fmt.Errorf("config file path must not be empty")
+ }
+ if p.loadedFromEmpty {
+ if err := os.MkdirAll(filepath.Dir(filename), os.ModePerm); err != nil {
+ return fmt.Errorf("failed to create %q: %v", filename, err)
+ }
+ }
+ if err := p.ini.SaveTo(filename); err != nil {
+ return fmt.Errorf("failed to save %q: %v", filename, err)
+ }
+
+ // Change permissions to be more restrictive
+ fi, err := os.Stat(filename)
+ if err != nil {
+ return fmt.Errorf("failed to determine current conf file permissions: %v", err)
+ }
+
+ if fi.Mode().Perm() > 0o600 {
+ if err = os.Chmod(filename, 0o600); err != nil {
+ log.Warn("Failed changing conf file permissions to -rw-------. Consider changing them manually.")
+ }
+ }
+ return nil
+}
+
+func (p *iniConfigProvider) SaveTo(filename string) error {
+ if p.disableSaving {
+ return errDisableSaving
+ }
+ return p.ini.SaveTo(filename)
+}
+
+// DisableSaving disables the saving function; use PrepareSaving to get clean config options.
+func (p *iniConfigProvider) DisableSaving() {
+ p.disableSaving = true
+}
+
+// PrepareSaving loads the ini from the file again to get clean config options.
+// Otherwise, the "MustXxx" calls would have polluted the current config provider,
+// which makes "Save" output a lot of garbage options.
+// Once the INI package is refactored so that "MustXxx" no longer pollutes the config, this workaround can be dropped.
+func (p *iniConfigProvider) PrepareSaving() (ConfigProvider, error) {
+ if p.file == "" {
+ return nil, errors.New("no config file to save")
+ }
+ return NewConfigProviderFromFile(p.file)
+}
+
+func (p *iniConfigProvider) IsLoadedFromEmpty() bool {
+ return p.loadedFromEmpty
+}
+
+func mustMapSetting(rootCfg ConfigProvider, sectionName string, setting any) {
+ if err := rootCfg.Section(sectionName).MapTo(setting); err != nil {
+ log.Fatal("Failed to map %s settings: %v", sectionName, err)
+ }
+}
+
+// DeprecatedWarnings contains the warning messages for various deprecations, including setting options, files/folders, etc.
+var DeprecatedWarnings []string
+
+func deprecatedSetting(rootCfg ConfigProvider, oldSection, oldKey, newSection, newKey, version string) {
+ if rootCfg.Section(oldSection).HasKey(oldKey) {
+ msg := fmt.Sprintf("Deprecated config option `[%s]` `%s` present. Use `[%s]` `%s` instead. This fallback will be/has been removed in %s", oldSection, oldKey, newSection, newKey, version)
+ log.Error("%v", msg)
+ DeprecatedWarnings = append(DeprecatedWarnings, msg)
+ }
+}
+
+// deprecatedSettingDB adds a hint that the configuration has been moved to the database but is still kept in app.ini
+func deprecatedSettingDB(rootCfg ConfigProvider, oldSection, oldKey string) {
+ if rootCfg.Section(oldSection).HasKey(oldKey) {
+ log.Error("Deprecated `[%s]` `%s` present which has been copied to database table sys_setting", oldSection, oldKey)
+ }
+}
+
+// NewConfigProviderForLocale loads locale configuration from source and others. A "string" is for a local file path, a "[]byte" is for INI content.
+func NewConfigProviderForLocale(source any, others ...any) (ConfigProvider, error) {
+ iniFile, err := ini.LoadSources(ini.LoadOptions{
+ IgnoreInlineComment: true,
+ UnescapeValueCommentSymbols: true,
+ IgnoreContinuation: true,
+ }, source, others...)
+ if err != nil {
+ return nil, fmt.Errorf("unable to load locale ini: %w", err)
+ }
+ iniFile.BlockMode = false
+ return &iniConfigProvider{
+ ini: iniFile,
+ loadedFromEmpty: true,
+ }, nil
+}
+
+func init() {
+ ini.PrettyFormat = false
+}
diff --git a/modules/setting/config_provider_test.go b/modules/setting/config_provider_test.go
new file mode 100644
index 0000000..702be80
--- /dev/null
+++ b/modules/setting/config_provider_test.go
@@ -0,0 +1,157 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestConfigProviderBehaviors(t *testing.T) {
+ t.Run("BuggyKeyOverwritten", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+key =
+`)
+ sec := cfg.Section("foo")
+ secSub := cfg.Section("foo.bar")
+ secSub.Key("key").MustString("1") // try to read a key from subsection
+ assert.Equal(t, "1", sec.Key("key").String()) // TODO: BUGGY! the key in [foo] is overwritten
+ })
+
+ t.Run("SubsectionSeeParentKeys", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+key = 123
+`)
+ secSub := cfg.Section("foo.bar.xxx")
+ assert.Equal(t, "123", secSub.Key("key").String())
+ })
+ t.Run("TrailingSlash", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+key = E:\
+xxx = yyy
+`)
+ sec := cfg.Section("foo")
+ assert.Equal(t, "E:\\", sec.Key("key").String())
+ assert.Equal(t, "yyy", sec.Key("xxx").String())
+ })
+}
+
+func TestConfigProviderHelper(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData(`
+[foo]
+empty =
+key = 123
+`)
+
+ sec := cfg.Section("foo")
+ secSub := cfg.Section("foo.bar")
+
+ // test empty key
+ assert.Equal(t, "def", ConfigSectionKeyString(sec, "empty", "def"))
+ assert.Equal(t, "xyz", ConfigSectionKeyString(secSub, "empty", "xyz"))
+
+ // test non-inherited key, only see the keys in current section
+ assert.NotNil(t, ConfigSectionKey(sec, "key"))
+ assert.Nil(t, ConfigSectionKey(secSub, "key"))
+
+ // test default behavior
+ assert.Equal(t, "123", ConfigSectionKeyString(sec, "key"))
+ assert.Equal(t, "", ConfigSectionKeyString(secSub, "key"))
+ assert.Equal(t, "def", ConfigSectionKeyString(secSub, "key", "def"))
+
+ assert.Equal(t, "123", ConfigInheritedKeyString(secSub, "key"))
+
+ // Workaround for ini package's BuggyKeyOverwritten behavior
+ assert.Equal(t, "", ConfigSectionKeyString(sec, "empty"))
+ assert.Equal(t, "", ConfigSectionKeyString(secSub, "empty"))
+ assert.Equal(t, "def", ConfigInheritedKey(secSub, "empty").MustString("def"))
+ assert.Equal(t, "def", ConfigInheritedKey(secSub, "empty").MustString("xyz"))
+ assert.Equal(t, "", ConfigSectionKeyString(sec, "empty"))
+ assert.Equal(t, "def", ConfigSectionKeyString(secSub, "empty"))
+}
+
+func TestNewConfigProviderFromFile(t *testing.T) {
+ cfg, err := NewConfigProviderFromFile("no-such.ini")
+ require.NoError(t, err)
+ assert.True(t, cfg.IsLoadedFromEmpty())
+
+ // load non-existing file and save
+ testFile := t.TempDir() + "/test.ini"
+ testFile1 := t.TempDir() + "/test1.ini"
+ cfg, err = NewConfigProviderFromFile(testFile)
+ require.NoError(t, err)
+
+ sec, _ := cfg.NewSection("foo")
+ _, _ = sec.NewKey("k1", "a")
+ require.NoError(t, cfg.Save())
+ _, _ = sec.NewKey("k2", "b")
+ require.NoError(t, cfg.SaveTo(testFile1))
+
+ bs, err := os.ReadFile(testFile)
+ require.NoError(t, err)
+ assert.Equal(t, "[foo]\nk1 = a\n", string(bs))
+
+ bs, err = os.ReadFile(testFile1)
+ require.NoError(t, err)
+ assert.Equal(t, "[foo]\nk1 = a\nk2 = b\n", string(bs))
+
+ // load existing file and save
+ cfg, err = NewConfigProviderFromFile(testFile)
+ require.NoError(t, err)
+ assert.Equal(t, "a", cfg.Section("foo").Key("k1").String())
+ sec, _ = cfg.NewSection("bar")
+ _, _ = sec.NewKey("k1", "b")
+ require.NoError(t, cfg.Save())
+ bs, err = os.ReadFile(testFile)
+ require.NoError(t, err)
+ assert.Equal(t, "[foo]\nk1 = a\n\n[bar]\nk1 = b\n", string(bs))
+}
+
+func TestNewConfigProviderForLocale(t *testing.T) {
+ // load locale from file
+ localeFile := t.TempDir() + "/locale.ini"
+ _ = os.WriteFile(localeFile, []byte(`k1=a`), 0o644)
+ cfg, err := NewConfigProviderForLocale(localeFile)
+ require.NoError(t, err)
+ assert.Equal(t, "a", cfg.Section("").Key("k1").String())
+
+ // load locale from bytes
+ cfg, err = NewConfigProviderForLocale([]byte("k1=foo\nk2=bar"))
+ require.NoError(t, err)
+ assert.Equal(t, "foo", cfg.Section("").Key("k1").String())
+ cfg, err = NewConfigProviderForLocale([]byte("k1=foo\nk2=bar"), []byte("k2=xxx"))
+ require.NoError(t, err)
+ assert.Equal(t, "foo", cfg.Section("").Key("k1").String())
+ assert.Equal(t, "xxx", cfg.Section("").Key("k2").String())
+}
+
+func TestDisableSaving(t *testing.T) {
+ testFile := t.TempDir() + "/test.ini"
+ _ = os.WriteFile(testFile, []byte("k1=a\nk2=b"), 0o644)
+ cfg, err := NewConfigProviderFromFile(testFile)
+ require.NoError(t, err)
+
+ cfg.DisableSaving()
+ err = cfg.Save()
+ require.ErrorIs(t, err, errDisableSaving)
+
+ saveCfg, err := cfg.PrepareSaving()
+ require.NoError(t, err)
+
+ saveCfg.Section("").Key("k1").MustString("x")
+ saveCfg.Section("").Key("k2").SetValue("y")
+ saveCfg.Section("").Key("k3").SetValue("z")
+ err = saveCfg.Save()
+ require.NoError(t, err)
+
+ bs, err := os.ReadFile(testFile)
+ require.NoError(t, err)
+ assert.Equal(t, "k1 = a\nk2 = y\nk3 = z\n", string(bs))
+}
diff --git a/modules/setting/cors.go b/modules/setting/cors.go
new file mode 100644
index 0000000..63daaad
--- /dev/null
+++ b/modules/setting/cors.go
@@ -0,0 +1,34 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// CORSConfig defines CORS settings
+var CORSConfig = struct {
+ Enabled bool
+ AllowDomain []string // FIXME: this option comes from legacy code and actually works as "AllowedOrigins"; when refactoring in the future, the config option should be renamed accordingly.
+ Methods []string
+ MaxAge time.Duration
+ AllowCredentials bool
+ Headers []string
+ XFrameOptions string
+}{
+ AllowDomain: []string{"*"},
+ Methods: []string{"GET", "HEAD", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"},
+ Headers: []string{"Content-Type", "User-Agent"},
+ MaxAge: 10 * time.Minute,
+ XFrameOptions: "SAMEORIGIN",
+}
+
+func loadCorsFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "cors", &CORSConfig)
+ if CORSConfig.Enabled {
+ log.Info("CORS Service Enabled")
+ }
+}
diff --git a/modules/setting/cron.go b/modules/setting/cron.go
new file mode 100644
index 0000000..7c4cc44
--- /dev/null
+++ b/modules/setting/cron.go
@@ -0,0 +1,32 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import "reflect"
+
+// GetCronSettings maps the cron subsection to the provided config
+func GetCronSettings(name string, config any) (any, error) {
+ return getCronSettings(CfgProvider, name, config)
+}
+
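+// getCronSettings maps the "cron.<name>" section onto config, including fields
+// promoted from embedded anonymous structs. For illustration (hypothetical
+// settings): a `[cron.archive_cleanup]` section with `SCHEDULE = @midnight`
+// fills a `Schedule string` field of the passed struct with "@midnight".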
+func getCronSettings(rootCfg ConfigProvider, name string, config any) (any, error) {
+ if err := rootCfg.Section("cron." + name).MapTo(config); err != nil {
+ return config, err
+ }
+
+ typ := reflect.TypeOf(config).Elem()
+ val := reflect.ValueOf(config).Elem()
+
+ for i := 0; i < typ.NumField(); i++ {
+ field := val.Field(i)
+ tpField := typ.Field(i)
+ if tpField.Type.Kind() == reflect.Struct && tpField.Anonymous {
+ if err := rootCfg.Section("cron." + name).MapTo(field.Addr().Interface()); err != nil {
+ return config, err
+ }
+ }
+ }
+
+ return config, nil
+}
diff --git a/modules/setting/cron_test.go b/modules/setting/cron_test.go
new file mode 100644
index 0000000..32f8ecf
--- /dev/null
+++ b/modules/setting/cron_test.go
@@ -0,0 +1,44 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getCronSettings(t *testing.T) {
+ type BaseStruct struct {
+ Base bool
+ Second string
+ }
+
+ type Extended struct {
+ BaseStruct
+ Extend bool
+ }
+
+ iniStr := `
+[cron.test]
+BASE = true
+SECOND = white rabbit
+EXTEND = true
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ extended := &Extended{
+ BaseStruct: BaseStruct{
+ Second: "queen of hearts",
+ },
+ }
+
+ _, err = getCronSettings(cfg, "test", extended)
+ require.NoError(t, err)
+ assert.True(t, extended.Base)
+ assert.EqualValues(t, "white rabbit", extended.Second)
+ assert.True(t, extended.Extend)
+}
diff --git a/modules/setting/database.go b/modules/setting/database.go
new file mode 100644
index 0000000..76fae27
--- /dev/null
+++ b/modules/setting/database.go
@@ -0,0 +1,204 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "net"
+ "net/url"
+ "os"
+ "path/filepath"
+ "strings"
+ "time"
+)
+
+var (
+ // SupportedDatabaseTypes includes all database types supported by XORM; sqlite3 may be added by `database_sqlite.go`
+ SupportedDatabaseTypes = []string{"mysql", "postgres"}
+ // DatabaseTypeNames contains the friendly names for all database types
+ DatabaseTypeNames = map[string]string{"mysql": "MySQL", "postgres": "PostgreSQL", "sqlite3": "SQLite3"}
+
+ // EnableSQLite3 indicates whether SQLite3 support was enabled at build time (set by a build flag)
+ EnableSQLite3 bool
+
+ // Database holds the database settings
+ Database = struct {
+ Type DatabaseType
+ Host string
+ Name string
+ User string
+ Passwd string
+ Schema string
+ SSLMode string
+ Path string
+ LogSQL bool
+ MysqlCharset string
+ CharsetCollation string
+ Timeout int // seconds
+ SQLiteJournalMode string
+ DBConnectRetries int
+ DBConnectBackoff time.Duration
+ MaxIdleConns int
+ MaxOpenConns int
+ ConnMaxIdleTime time.Duration
+ ConnMaxLifetime time.Duration
+ IterateBufferSize int
+ AutoMigration bool
+ SlowQueryThreshold time.Duration
+ }{
+ Timeout: 500,
+ IterateBufferSize: 50,
+ }
+)
+
+// LoadDBSetting loads the database settings
+func LoadDBSetting() {
+ loadDBSetting(CfgProvider)
+}
+
+func loadDBSetting(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("database")
+ Database.Type = DatabaseType(sec.Key("DB_TYPE").String())
+
+ Database.Host = sec.Key("HOST").String()
+ Database.Name = sec.Key("NAME").String()
+ Database.User = sec.Key("USER").String()
+ if len(Database.Passwd) == 0 {
+ Database.Passwd = sec.Key("PASSWD").String()
+ }
+ Database.Schema = sec.Key("SCHEMA").String()
+ Database.SSLMode = sec.Key("SSL_MODE").MustString("disable")
+ Database.CharsetCollation = sec.Key("CHARSET_COLLATION").String()
+
+ Database.Path = sec.Key("PATH").MustString(filepath.Join(AppDataPath, "forgejo.db"))
+ Database.Timeout = sec.Key("SQLITE_TIMEOUT").MustInt(500)
+ Database.SQLiteJournalMode = sec.Key("SQLITE_JOURNAL_MODE").MustString("")
+
+ Database.MaxIdleConns = sec.Key("MAX_IDLE_CONNS").MustInt(2)
+ if Database.Type.IsMySQL() {
+ Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(3 * time.Second)
+ } else {
+ Database.ConnMaxLifetime = sec.Key("CONN_MAX_LIFETIME").MustDuration(0)
+ }
+ Database.ConnMaxIdleTime = sec.Key("CONN_MAX_IDLETIME").MustDuration(0)
+ Database.MaxOpenConns = sec.Key("MAX_OPEN_CONNS").MustInt(100)
+
+ Database.IterateBufferSize = sec.Key("ITERATE_BUFFER_SIZE").MustInt(50)
+ Database.LogSQL = sec.Key("LOG_SQL").MustBool(false)
+ Database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
+ Database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
+ Database.AutoMigration = sec.Key("AUTO_MIGRATION").MustBool(true)
+
+ deprecatedSetting(rootCfg, "database", "SLOW_QUERY_TRESHOLD", "database", "SLOW_QUERY_THRESHOLD", "1.23")
+ if sec.HasKey("SLOW_QUERY_TRESHOLD") && !sec.HasKey("SLOW_QUERY_THRESHOLD") {
+ Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_TRESHOLD").MustDuration(5 * time.Second)
+ } else {
+ Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_THRESHOLD").MustDuration(5 * time.Second)
+ }
+}
+
+// DBConnStr returns database connection string
+func DBConnStr() (string, error) {
+ var connStr string
+ paramSep := "?"
+ if strings.Contains(Database.Name, paramSep) {
+ paramSep = "&"
+ }
+ switch Database.Type {
+ case "mysql":
+ connType := "tcp"
+ if len(Database.Host) > 0 && Database.Host[0] == '/' { // looks like a unix socket
+ connType = "unix"
+ }
+ tls := Database.SSLMode
+ if tls == "disable" { // allow (Postgres-inspired) default value to work in MySQL
+ tls = "false"
+ }
+ connStr = fmt.Sprintf("%s:%s@%s(%s)/%s%sparseTime=true&tls=%s",
+ Database.User, Database.Passwd, connType, Database.Host, Database.Name, paramSep, tls)
+ case "postgres":
+ connStr = getPostgreSQLConnectionString(Database.Host, Database.User, Database.Passwd, Database.Name, Database.SSLMode)
+ case "sqlite3":
+ if !EnableSQLite3 {
+ return "", errors.New("this Gitea binary was not built with SQLite3 support")
+ }
+ if err := os.MkdirAll(filepath.Dir(Database.Path), os.ModePerm); err != nil {
+ return "", fmt.Errorf("Failed to create directories: %w", err)
+ }
+ journalMode := ""
+ if Database.SQLiteJournalMode != "" {
+ journalMode = "&_journal_mode=" + Database.SQLiteJournalMode
+ }
+ connStr = fmt.Sprintf("file:%s?cache=shared&mode=rwc&_busy_timeout=%d&_txlock=immediate%s",
+ Database.Path, Database.Timeout, journalMode)
+ default:
+ return "", fmt.Errorf("unknown database type: %s", Database.Type)
+ }
+
+ return connStr, nil
+}
+
+// parsePostgreSQLHostPort parses given input in various forms defined in
+// https://www.postgresql.org/docs/current/static/libpq-connect.html#LIBPQ-CONNSTRING
+// and returns proper host and port number.
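+// Examples (illustrative, mirroring the test cases): "127.0.0.1:1234" yields
+// ("127.0.0.1", "1234"), "[::1]" yields ("::1", "5432"), and a socket path
+// such as "/tmp/pg.sock" yields ("/tmp/pg.sock", "5432").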
+func parsePostgreSQLHostPort(info string) (host, port string) {
+ if h, p, err := net.SplitHostPort(info); err == nil {
+ host, port = h, p
+ } else {
+ // treat the "info" as "host", if it's an IPv6 address, remove the wrapper
+ host = info
+ if strings.HasPrefix(host, "[") && strings.HasSuffix(host, "]") {
+ host = host[1 : len(host)-1]
+ }
+ }
+
+ // set fallback values
+ if host == "" {
+ host = "127.0.0.1"
+ }
+ if port == "" {
+ port = "5432"
+ }
+ return host, port
+}
+
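+// getPostgreSQLConnectionString assembles a libpq-compatible connection URL.
+// For example (illustrative), host "localhost", user "forgejo", database
+// "forgejo" and sslmode "disable" produce
+// "postgres://forgejo:...@localhost:5432/forgejo?sslmode=disable"; a unix
+// socket host such as "/tmp/pg.sock" is moved into the "host" query parameter
+// instead, as the accompanying tests demonstrate.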
+func getPostgreSQLConnectionString(dbHost, dbUser, dbPasswd, dbName, dbsslMode string) (connStr string) {
+ dbName, dbParam, _ := strings.Cut(dbName, "?")
+ host, port := parsePostgreSQLHostPort(dbHost)
+ connURL := url.URL{
+ Scheme: "postgres",
+ User: url.UserPassword(dbUser, dbPasswd),
+ Host: net.JoinHostPort(host, port),
+ Path: dbName,
+ OmitHost: false,
+ RawQuery: dbParam,
+ }
+ query := connURL.Query()
+ if strings.HasPrefix(host, "/") { // looks like a unix socket
+ query.Add("host", host)
+ connURL.Host = ":" + port
+ }
+ query.Set("sslmode", dbsslMode)
+ connURL.RawQuery = query.Encode()
+ return connURL.String()
+}
+
+type DatabaseType string
+
+func (t DatabaseType) String() string {
+ return string(t)
+}
+
+func (t DatabaseType) IsSQLite3() bool {
+ return t == "sqlite3"
+}
+
+func (t DatabaseType) IsMySQL() bool {
+ return t == "mysql"
+}
+
+func (t DatabaseType) IsPostgreSQL() bool {
+ return t == "postgres"
+}
diff --git a/modules/setting/database_sqlite.go b/modules/setting/database_sqlite.go
new file mode 100644
index 0000000..c1037cf
--- /dev/null
+++ b/modules/setting/database_sqlite.go
@@ -0,0 +1,15 @@
+//go:build sqlite
+
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ _ "github.com/mattn/go-sqlite3"
+)
+
+func init() {
+ EnableSQLite3 = true
+ SupportedDatabaseTypes = append(SupportedDatabaseTypes, "sqlite3")
+}
diff --git a/modules/setting/database_test.go b/modules/setting/database_test.go
new file mode 100644
index 0000000..a742d54
--- /dev/null
+++ b/modules/setting/database_test.go
@@ -0,0 +1,109 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_parsePostgreSQLHostPort(t *testing.T) {
+ tests := map[string]struct {
+ HostPort string
+ Host string
+ Port string
+ }{
+ "host-port": {
+ HostPort: "127.0.0.1:1234",
+ Host: "127.0.0.1",
+ Port: "1234",
+ },
+ "no-port": {
+ HostPort: "127.0.0.1",
+ Host: "127.0.0.1",
+ Port: "5432",
+ },
+ "ipv6-port": {
+ HostPort: "[::1]:1234",
+ Host: "::1",
+ Port: "1234",
+ },
+ "ipv6-no-port": {
+ HostPort: "[::1]",
+ Host: "::1",
+ Port: "5432",
+ },
+ "unix-socket": {
+ HostPort: "/tmp/pg.sock:1234",
+ Host: "/tmp/pg.sock",
+ Port: "1234",
+ },
+ "unix-socket-no-port": {
+ HostPort: "/tmp/pg.sock",
+ Host: "/tmp/pg.sock",
+ Port: "5432",
+ },
+ }
+ for k, test := range tests {
+ t.Run(k, func(t *testing.T) {
+ t.Log(test.HostPort)
+ host, port := parsePostgreSQLHostPort(test.HostPort)
+ assert.Equal(t, test.Host, host)
+ assert.Equal(t, test.Port, port)
+ })
+ }
+}
+
+func Test_getPostgreSQLConnectionString(t *testing.T) {
+ tests := []struct {
+ Host string
+ User string
+ Passwd string
+ Name string
+ SSLMode string
+ Output string
+ }{
+ {
+ Host: "", // empty means default
+ Output: "postgres://:@127.0.0.1:5432?sslmode=",
+ },
+ {
+ Host: "/tmp/pg.sock",
+ User: "testuser",
+ Passwd: "space space !#$%^^%^```-=?=",
+ Name: "gitea",
+ SSLMode: "false",
+ Output: "postgres://testuser:space%20space%20%21%23$%25%5E%5E%25%5E%60%60%60-=%3F=@:5432/gitea?host=%2Ftmp%2Fpg.sock&sslmode=false",
+ },
+ {
+ Host: "/tmp/pg.sock:6432",
+ User: "testuser",
+ Passwd: "pass",
+ Name: "gitea",
+ SSLMode: "false",
+ Output: "postgres://testuser:pass@:6432/gitea?host=%2Ftmp%2Fpg.sock&sslmode=false",
+ },
+ {
+ Host: "localhost",
+ User: "pgsqlusername",
+ Passwd: "I love Gitea!",
+ Name: "gitea",
+ SSLMode: "true",
+ Output: "postgres://pgsqlusername:I%20love%20Gitea%21@localhost:5432/gitea?sslmode=true",
+ },
+ {
+ Host: "localhost:1234",
+ User: "user",
+ Passwd: "pass",
+ Name: "gitea?param=1",
+ Output: "postgres://user:pass@localhost:1234/gitea?param=1&sslmode=",
+ },
+ }
+
+ for _, test := range tests {
+ connStr := getPostgreSQLConnectionString(test.Host, test.User, test.Passwd, test.Name, test.SSLMode)
+ assert.Equal(t, test.Output, connStr)
+ }
+}
diff --git a/modules/setting/f3.go b/modules/setting/f3.go
new file mode 100644
index 0000000..8669b70
--- /dev/null
+++ b/modules/setting/f3.go
@@ -0,0 +1,26 @@
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Friendly Forge Format (F3) settings
+var (
+ F3 = struct {
+ Enabled bool
+ }{
+ Enabled: false,
+ }
+)
+
+func LoadF3Setting() {
+ loadF3From(CfgProvider)
+}
+
+func loadF3From(rootCfg ConfigProvider) {
+ if err := rootCfg.Section("F3").MapTo(&F3); err != nil {
+ log.Fatal("Failed to map F3 settings: %v", err)
+ }
+}
diff --git a/modules/setting/federation.go b/modules/setting/federation.go
new file mode 100644
index 0000000..aeb3068
--- /dev/null
+++ b/modules/setting/federation.go
@@ -0,0 +1,51 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/go-fed/httpsig"
+)
+
+// Federation settings
+var (
+ Federation = struct {
+ Enabled bool
+ ShareUserStatistics bool
+ MaxSize int64
+ Algorithms []string
+ DigestAlgorithm string
+ GetHeaders []string
+ PostHeaders []string
+ }{
+ Enabled: false,
+ ShareUserStatistics: true,
+ MaxSize: 4,
+ Algorithms: []string{"rsa-sha256", "rsa-sha512", "ed25519"},
+ DigestAlgorithm: "SHA-256",
+ GetHeaders: []string{"(request-target)", "Date", "Host"},
+ PostHeaders: []string{"(request-target)", "Date", "Host", "Digest"},
+ }
+)
+
+// HttpsigAlgs is a constant slice of httpsig algorithm objects
+var HttpsigAlgs []httpsig.Algorithm
+
+func loadFederationFrom(rootCfg ConfigProvider) {
+ if err := rootCfg.Section("federation").MapTo(&Federation); err != nil {
+ log.Fatal("Failed to map Federation settings: %v", err)
+ } else if !httpsig.IsSupportedDigestAlgorithm(Federation.DigestAlgorithm) {
+ log.Fatal("unsupported digest algorithm: %s", Federation.DigestAlgorithm)
+ return
+ }
+
+ // Get MaxSize in bytes instead of MiB
+ Federation.MaxSize = 1 << 20 * Federation.MaxSize
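+ // e.g. the default MaxSize of 4 becomes 4 * 1024 * 1024 = 4194304 bytes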
+
+ HttpsigAlgs = make([]httpsig.Algorithm, len(Federation.Algorithms))
+ for i, alg := range Federation.Algorithms {
+ HttpsigAlgs[i] = httpsig.Algorithm(alg)
+ }
+}
diff --git a/modules/setting/forgejo_storage_test.go b/modules/setting/forgejo_storage_test.go
new file mode 100644
index 0000000..d91bff5
--- /dev/null
+++ b/modules/setting/forgejo_storage_test.go
@@ -0,0 +1,264 @@
+// SPDX-License-Identifier: MIT
+
+//
+// Tests verifying that the Forgejo documentation on storage settings is correct
+//
+// https://forgejo.org/docs/v1.20/admin/storage/
+//
+
+package setting
+
+import (
+ "fmt"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestForgejoDocs_StorageTypes(t *testing.T) {
+ iniStr := `
+[server]
+APP_DATA_PATH = /
+`
+ testStorageTypesDefaultAndSpecificStorage(t, iniStr)
+}
+
+func testStorageGetPath(storage *Storage) string {
+ if storage.Type == MinioStorageType {
+ return storage.MinioConfig.BasePath
+ }
+ return storage.Path
+}
+
+var testSectionToBasePath = map[string]string{
+ "attachment": "attachments",
+ "lfs": "lfs",
+ "avatar": "avatars",
+ "repo-avatar": "repo-avatars",
+ "repo-archive": "repo-archive",
+ "packages": "packages",
+ "storage.actions_log": "actions_log",
+ "actions.artifacts": "actions_artifacts",
+}
+
+type testSectionToPathFun func(StorageType, string) string
+
+func testBuildPath(t StorageType, path string) string {
+ if t == LocalStorageType {
+ return "/" + path
+ }
+ return path + "/"
+}
+
+func testSectionToPath(t StorageType, section string) string {
+ return testBuildPath(t, testSectionToBasePath[section])
+}
+
+func testSpecificPath(t StorageType, section string) string {
+ if t == LocalStorageType {
+ return "/specific_local_path"
+ }
+ return "specific_s3_base_path/"
+}
+
+func testDefaultDir(t StorageType) string {
+ if t == LocalStorageType {
+ return "default_local_path"
+ }
+ return "default_s3_base_path"
+}
+
+func testDefaultPath(t StorageType) string {
+ return testBuildPath(t, testDefaultDir(t))
+}
+
+func testSectionToDefaultPath(t StorageType, section string) string {
+ return testBuildPath(t, filepath.Join(testDefaultDir(t), testSectionToPath(t, section)))
+}
+
+func testLegacyPath(t StorageType, section string) string {
+ return testBuildPath(t, fmt.Sprintf("legacy_%s_path", section))
+}
+
+func testStorageTypeToSetting(t StorageType) string {
+ if t == LocalStorageType {
+ return "PATH"
+ }
+ return "MINIO_BASE_PATH"
+}
+
+var testSectionToLegacy = map[string]string{
+ "lfs": fmt.Sprintf(`
+[server]
+APP_DATA_PATH = /
+LFS_CONTENT_PATH = %s
+`, testLegacyPath(LocalStorageType, "lfs")),
+ "avatar": fmt.Sprintf(`
+[picture]
+AVATAR_UPLOAD_PATH = %s
+`, testLegacyPath(LocalStorageType, "avatar")),
+ "repo-avatar": fmt.Sprintf(`
+[picture]
+REPOSITORY_AVATAR_UPLOAD_PATH = %s
+`, testLegacyPath(LocalStorageType, "repo-avatar")),
+}
+
+func testStorageTypesDefaultAndSpecificStorage(t *testing.T, iniStr string) {
+ storageType := MinioStorageType
+ t.Run(string(storageType), func(t *testing.T) {
+ t.Run("override type minio", func(t *testing.T) {
+ storageSection := `
+[storage]
+STORAGE_TYPE = minio
+`
+ testStorageTypesSpecificStorages(t, iniStr+storageSection, storageType, testSectionToPath, testSectionToPath)
+ })
+ })
+
+ storageType = LocalStorageType
+
+ t.Run(string(storageType), func(t *testing.T) {
+ storageSection := ""
+ testStorageTypesSpecificStorages(t, iniStr+storageSection, storageType, testSectionToPath, testSectionToPath)
+
+ t.Run("override type local", func(t *testing.T) {
+ storageSection := `
+[storage]
+STORAGE_TYPE = local
+`
+ testStorageTypesSpecificStorages(t, iniStr+storageSection, storageType, testSectionToPath, testSectionToPath)
+
+ storageSection = fmt.Sprintf(`
+[storage]
+STORAGE_TYPE = local
+PATH = %s
+`, testDefaultPath(LocalStorageType))
+ testStorageTypesSpecificStorageSections(t, iniStr+storageSection, storageType, testSectionToDefaultPath, testSectionToPath)
+ })
+ })
+}
+
+func testStorageTypesSpecificStorageSections(t *testing.T, iniStr string, defaultStorageType StorageType, defaultStorageTypePath, testSectionToPath testSectionToPathFun) {
+ testSectionsMap := map[string]**Storage{
+ "attachment": &Attachment.Storage,
+ "lfs": &LFS.Storage,
+ "avatar": &Avatar.Storage,
+ "repo-avatar": &RepoAvatar.Storage,
+ "repo-archive": &RepoArchive.Storage,
+ "packages": &Packages.Storage,
+ // there are inconsistencies in how actions storage is determined in v1.20
+ // it is still alpha and undocumented and is ignored for now
+ //"storage.actions_log": &Actions.LogStorage,
+ //"actions.artifacts": &Actions.ArtifactStorage,
+ }
+
+ for sectionName, storage := range testSectionsMap {
+ t.Run(sectionName, func(t *testing.T) {
+ testStorageTypesSpecificStorage(t, iniStr, defaultStorageType, defaultStorageTypePath, testSectionToPath, sectionName, storage)
+ })
+ }
+}
+
+func testStorageTypesSpecificStorages(t *testing.T, iniStr string, defaultStorageType StorageType, defaultStorageTypePath, testSectionToPath testSectionToPathFun) {
+ testSectionsMap := map[string]**Storage{
+ "attachment": &Attachment.Storage,
+ "lfs": &LFS.Storage,
+ "avatar": &Avatar.Storage,
+ "repo-avatar": &RepoAvatar.Storage,
+ "repo-archive": &RepoArchive.Storage,
+ "packages": &Packages.Storage,
+ "storage.actions_log": &Actions.LogStorage,
+ "actions.artifacts": &Actions.ArtifactStorage,
+ }
+
+ for sectionName, storage := range testSectionsMap {
+ t.Run(sectionName, func(t *testing.T) {
+ if legacy, ok := testSectionToLegacy[sectionName]; ok {
+ if defaultStorageType == LocalStorageType {
+ t.Run("legacy local", func(t *testing.T) {
+ testStorageTypesSpecificStorage(t, iniStr+legacy, LocalStorageType, testLegacyPath, testSectionToPath, sectionName, storage)
+ testStorageTypesSpecificStorageTypeOverride(t, iniStr+legacy, LocalStorageType, testLegacyPath, testSectionToPath, sectionName, storage)
+ })
+ } else {
+ t.Run("legacy minio", func(t *testing.T) {
+ testStorageTypesSpecificStorage(t, iniStr+legacy, MinioStorageType, defaultStorageTypePath, testSectionToPath, sectionName, storage)
+ testStorageTypesSpecificStorageTypeOverride(t, iniStr+legacy, LocalStorageType, testLegacyPath, testSectionToPath, sectionName, storage)
+ })
+ }
+ }
+ for _, specificStorageType := range storageTypes {
+ testStorageTypesSpecificStorageTypeOverride(t, iniStr, specificStorageType, defaultStorageTypePath, testSectionToPath, sectionName, storage)
+ }
+ })
+ }
+}
+
+func testStorageTypesSpecificStorage(t *testing.T, iniStr string, defaultStorageType StorageType, defaultStorageTypePath, testSectionToPath testSectionToPathFun, sectionName string, storage **Storage) {
+ var section string
+
+ //
+ // Specific section is absent
+ //
+ testStoragePathMatch(t, iniStr, defaultStorageType, defaultStorageTypePath, sectionName, storage)
+
+ //
+ // Specific section is empty
+ //
+ section = fmt.Sprintf(`
+[%s]
+`,
+ sectionName)
+ testStoragePathMatch(t, iniStr+section, defaultStorageType, defaultStorageTypePath, sectionName, storage)
+
+ //
+ // Specific section with a path override
+ //
+ section = fmt.Sprintf(`
+[%s]
+%s = %s
+`,
+ sectionName,
+ testStorageTypeToSetting(defaultStorageType),
+ testSpecificPath(defaultStorageType, ""))
+ testStoragePathMatch(t, iniStr+section, defaultStorageType, testSpecificPath, sectionName, storage)
+}
+
+func testStorageTypesSpecificStorageTypeOverride(t *testing.T, iniStr string, overrideStorageType StorageType, defaultStorageTypePath, testSectionToPath testSectionToPathFun, sectionName string, storage **Storage) {
+ var section string
+ t.Run("specific-"+string(overrideStorageType), func(t *testing.T) {
+ //
+ // Specific section with a path and storage type override
+ //
+ section = fmt.Sprintf(`
+[%s]
+STORAGE_TYPE = %s
+%s = %s
+`,
+ sectionName,
+ overrideStorageType,
+ testStorageTypeToSetting(overrideStorageType),
+ testSpecificPath(overrideStorageType, ""))
+ testStoragePathMatch(t, iniStr+section, overrideStorageType, testSpecificPath, sectionName, storage)
+
+ //
+ // Specific section with type override
+ //
+ section = fmt.Sprintf(`
+[%s]
+STORAGE_TYPE = %s
+`,
+ sectionName,
+ overrideStorageType)
+ testStoragePathMatch(t, iniStr+section, overrideStorageType, defaultStorageTypePath, sectionName, storage)
+ })
+}
+
+func testStoragePathMatch(t *testing.T, iniStr string, storageType StorageType, testSectionToPath testSectionToPathFun, section string, storage **Storage) {
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err, iniStr)
+ require.NoError(t, loadCommonSettingsFrom(cfg), iniStr)
+ assert.EqualValues(t, testSectionToPath(storageType, section), testStorageGetPath(*storage), iniStr)
+ assert.EqualValues(t, storageType, (*storage).Type, iniStr)
+}
diff --git a/modules/setting/git.go b/modules/setting/git.go
new file mode 100644
index 0000000..812c4fe
--- /dev/null
+++ b/modules/setting/git.go
@@ -0,0 +1,123 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Git settings
+var Git = struct {
+ Path string
+ HomePath string
+ DisableDiffHighlight bool
+
+ MaxGitDiffLines int
+ MaxGitDiffLineCharacters int
+ MaxGitDiffFiles int
+ CommitsRangeSize int // CommitsRangeSize is the default commits range size
+ BranchesRangeSize int // BranchesRangeSize is the default branches range size
+ VerbosePush bool
+ VerbosePushDelay time.Duration
+ GCArgs []string `ini:"GC_ARGS" delim:" "`
+ EnableAutoGitWireProtocol bool
+ PullRequestPushMessage bool
+ LargeObjectThreshold int64
+ DisableCoreProtectNTFS bool
+ DisablePartialClone bool
+ Timeout struct {
+ Default int
+ Migrate int
+ Mirror int
+ Clone int
+ Pull int
+ GC int `ini:"GC"`
+ Grep int
+ } `ini:"git.timeout"`
+}{
+ DisableDiffHighlight: false,
+ MaxGitDiffLines: 1000,
+ MaxGitDiffLineCharacters: 5000,
+ MaxGitDiffFiles: 100,
+ CommitsRangeSize: 50,
+ BranchesRangeSize: 20,
+ VerbosePush: true,
+ VerbosePushDelay: 5 * time.Second,
+ GCArgs: []string{},
+ EnableAutoGitWireProtocol: true,
+ PullRequestPushMessage: true,
+ LargeObjectThreshold: 1024 * 1024,
+ DisablePartialClone: false,
+ Timeout: struct {
+ Default int
+ Migrate int
+ Mirror int
+ Clone int
+ Pull int
+ GC int `ini:"GC"`
+ Grep int
+ }{
+ Default: 360,
+ Migrate: 600,
+ Mirror: 300,
+ Clone: 300,
+ Pull: 300,
+ GC: 60,
+ Grep: 2,
+ },
+}
+
+type GitConfigType struct {
+ Options map[string]string // git config key is case-insensitive, always use lower-case
+}
+
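+// SetOption stores a git config option under its lower-cased key, so e.g.
+// SetOption("Diff.Algorithm", "patience") and GetOption("diff.algorithm")
+// refer to the same entry (illustrative values).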
+func (c *GitConfigType) SetOption(key, val string) {
+ c.Options[strings.ToLower(key)] = val
+}
+
+func (c *GitConfigType) GetOption(key string) string {
+ return c.Options[strings.ToLower(key)]
+}
+
+var GitConfig = GitConfigType{
+ Options: make(map[string]string),
+}
+
+func loadGitFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("git")
+ if err := sec.MapTo(&Git); err != nil {
+ log.Fatal("Failed to map Git settings: %v", err)
+ }
+
+ secGitConfig := rootCfg.Section("git.config")
+ GitConfig.Options = make(map[string]string)
+ GitConfig.SetOption("diff.algorithm", "histogram")
+ GitConfig.SetOption("core.logAllRefUpdates", "true")
+ GitConfig.SetOption("gc.reflogExpire", "90")
+
+ secGitReflog := rootCfg.Section("git.reflog")
+ if secGitReflog.HasKey("ENABLED") {
+ deprecatedSetting(rootCfg, "git.reflog", "ENABLED", "git.config", "core.logAllRefUpdates", "1.21")
+ GitConfig.SetOption("core.logAllRefUpdates", secGitReflog.Key("ENABLED").In("true", []string{"true", "false"}))
+ }
+ if secGitReflog.HasKey("EXPIRATION") {
+ deprecatedSetting(rootCfg, "git.reflog", "EXPIRATION", "git.config", "core.reflogExpire", "1.21")
+ GitConfig.SetOption("gc.reflogExpire", secGitReflog.Key("EXPIRATION").String())
+ }
+
+ for _, key := range secGitConfig.Keys() {
+ GitConfig.SetOption(key.Name(), key.String())
+ }
+
+ Git.HomePath = sec.Key("HOME_PATH").MustString("home")
+ if !filepath.IsAbs(Git.HomePath) {
+ Git.HomePath = filepath.Join(AppDataPath, Git.HomePath)
+ } else {
+ Git.HomePath = filepath.Clean(Git.HomePath)
+ }
+}
diff --git a/modules/setting/git_test.go b/modules/setting/git_test.go
new file mode 100644
index 0000000..34427f9
--- /dev/null
+++ b/modules/setting/git_test.go
@@ -0,0 +1,66 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGitConfig(t *testing.T) {
+ oldGit := Git
+ oldGitConfig := GitConfig
+ defer func() {
+ Git = oldGit
+ GitConfig = oldGitConfig
+ }()
+
+ cfg, err := NewConfigProviderFromData(`
+[git.config]
+a.b = 1
+`)
+ require.NoError(t, err)
+ loadGitFrom(cfg)
+ assert.EqualValues(t, "1", GitConfig.Options["a.b"])
+ assert.EqualValues(t, "histogram", GitConfig.Options["diff.algorithm"])
+
+ cfg, err = NewConfigProviderFromData(`
+[git.config]
+diff.algorithm = other
+`)
+ require.NoError(t, err)
+ loadGitFrom(cfg)
+ assert.EqualValues(t, "other", GitConfig.Options["diff.algorithm"])
+}
+
+func TestGitReflog(t *testing.T) {
+ oldGit := Git
+ oldGitConfig := GitConfig
+ defer func() {
+ Git = oldGit
+ GitConfig = oldGitConfig
+ }()
+
+ // default reflog config without legacy options
+ cfg, err := NewConfigProviderFromData(``)
+ require.NoError(t, err)
+ loadGitFrom(cfg)
+
+ assert.EqualValues(t, "true", GitConfig.GetOption("core.logAllRefUpdates"))
+ assert.EqualValues(t, "90", GitConfig.GetOption("gc.reflogExpire"))
+
+ // custom reflog config by legacy options
+ cfg, err = NewConfigProviderFromData(`
+[git.reflog]
+ENABLED = false
+EXPIRATION = 123
+`)
+ require.NoError(t, err)
+ loadGitFrom(cfg)
+
+ assert.EqualValues(t, "false", GitConfig.GetOption("core.logAllRefUpdates"))
+ assert.EqualValues(t, "123", GitConfig.GetOption("gc.reflogExpire"))
+}
diff --git a/modules/setting/highlight.go b/modules/setting/highlight.go
new file mode 100644
index 0000000..6291b08
--- /dev/null
+++ b/modules/setting/highlight.go
@@ -0,0 +1,17 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
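+// GetHighlightMapping returns the [highlight.mapping] section as a plain map.
+// It is typically used to map file extensions to a highlighting language, e.g.
+// (illustrative) a key of ".toml.example" with a value of "toml".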
+func GetHighlightMapping() map[string]string {
+ highlightMapping := map[string]string{}
+ if CfgProvider == nil {
+ return highlightMapping
+ }
+
+ keys := CfgProvider.Section("highlight.mapping").Keys()
+ for _, key := range keys {
+ highlightMapping[key.Name()] = key.Value()
+ }
+ return highlightMapping
+}
diff --git a/modules/setting/i18n.go b/modules/setting/i18n.go
new file mode 100644
index 0000000..889e52b
--- /dev/null
+++ b/modules/setting/i18n.go
@@ -0,0 +1,68 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// defaultI18nLangNames must be a slice because the order matters
+var defaultI18nLangNames = []string{
+ "en-US", "English",
+ "zh-CN", "简体中文",
+ "zh-HK", "繁體中文(香港)",
+ "zh-TW", "繁體中文(台灣)",
+ "de-DE", "Deutsch",
+ "fr-FR", "Français",
+ "nl-NL", "Nederlands",
+ "lv-LV", "Latviešu",
+ "ru-RU", "Русский",
+ "uk-UA", "Українська",
+ "ja-JP", "日本語",
+ "es-ES", "Español",
+ "pt-BR", "Português do Brasil",
+ "pt-PT", "Português de Portugal",
+ "pl-PL", "Polski",
+ "bg", "Български",
+ "it-IT", "Italiano",
+ "fi-FI", "Suomi",
+ "fil", "Filipino",
+ "eo", "Esperanto",
+ "tr-TR", "Türkçe",
+ "cs-CZ", "Čeština",
+ "sl", "Slovenščina",
+ "sv-SE", "Svenska",
+ "ko-KR", "한국어",
+ "el-GR", "Ελληνικά",
+ "fa-IR", "فارسی",
+ "hu-HU", "Magyar nyelv",
+ "id-ID", "Bahasa Indonesia",
+}
+
+func defaultI18nLangs() (res []string) {
+ for i := 0; i < len(defaultI18nLangNames); i += 2 {
+ res = append(res, defaultI18nLangNames[i])
+ }
+ return res
+}
+
+func defaultI18nNames() (res []string) {
+ for i := 0; i < len(defaultI18nLangNames); i += 2 {
+ res = append(res, defaultI18nLangNames[i+1])
+ }
+ return res
+}
+
+var (
+ // I18n settings
+ Langs []string
+ Names []string
+)
+
+func loadI18nFrom(rootCfg ConfigProvider) {
+ Langs = rootCfg.Section("i18n").Key("LANGS").Strings(",")
+ if len(Langs) == 0 {
+ Langs = defaultI18nLangs()
+ }
+ Names = rootCfg.Section("i18n").Key("NAMES").Strings(",")
+ if len(Names) == 0 {
+ Names = defaultI18nNames()
+ }
+}
diff --git a/modules/setting/incoming_email.go b/modules/setting/incoming_email.go
new file mode 100644
index 0000000..287e729
--- /dev/null
+++ b/modules/setting/incoming_email.go
@@ -0,0 +1,89 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "net/mail"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+var IncomingEmail = struct {
+ Enabled bool
+ ReplyToAddress string
+ TokenPlaceholder string `ini:"-"`
+ Host string
+ Port int
+ UseTLS bool `ini:"USE_TLS"`
+ SkipTLSVerify bool `ini:"SKIP_TLS_VERIFY"`
+ Username string
+ Password string
+ Mailbox string
+ DeleteHandledMessage bool
+ MaximumMessageSize uint32
+}{
+ Mailbox: "INBOX",
+ DeleteHandledMessage: true,
+ TokenPlaceholder: "%{token}",
+ MaximumMessageSize: 10485760,
+}
+
+func loadIncomingEmailFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "email.incoming", &IncomingEmail)
+
+ if !IncomingEmail.Enabled {
+ return
+ }
+
+ // Handle aliases
+ sec := rootCfg.Section("email.incoming")
+ if sec.HasKey("USER") && !sec.HasKey("USERNAME") {
+ IncomingEmail.Username = sec.Key("USER").String()
+ }
+ if sec.HasKey("PASSWD") && !sec.HasKey("PASSWORD") {
+ IncomingEmail.Password = sec.Key("PASSWD").String()
+ }
+
+ // Infer Port if not set
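+ // (993 is the implicit-TLS IMAP port, 143 the plaintext/STARTTLS IMAP port)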
+ if IncomingEmail.Port == 0 {
+ if IncomingEmail.UseTLS {
+ IncomingEmail.Port = 993
+ } else {
+ IncomingEmail.Port = 143
+ }
+ }
+
+ if err := checkReplyToAddress(); err != nil {
+ log.Fatal("Invalid incoming_mail.REPLY_TO_ADDRESS (%s): %v", IncomingEmail.ReplyToAddress, err)
+ }
+}
+
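+// checkReplyToAddress validates REPLY_TO_ADDRESS: the token placeholder must
+// occur exactly once and in the user part of the address, e.g. (illustrative)
+// "forge+%{token}@example.com" is accepted, while "forge@%{token}.example.com"
+// is rejected.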
+func checkReplyToAddress() error {
+ parsed, err := mail.ParseAddress(IncomingEmail.ReplyToAddress)
+ if err != nil {
+ return err
+ }
+
+ if parsed.Name != "" {
+ return fmt.Errorf("name must not be set")
+ }
+
+ c := strings.Count(IncomingEmail.ReplyToAddress, IncomingEmail.TokenPlaceholder)
+ switch c {
+ case 0:
+ return fmt.Errorf("%s must appear in the user part of the address (before the @)", IncomingEmail.TokenPlaceholder)
+ case 1:
+ default:
+ return fmt.Errorf("%s must appear only once", IncomingEmail.TokenPlaceholder)
+ }
+
+ parts := strings.Split(IncomingEmail.ReplyToAddress, "@")
+ if !strings.Contains(parts[0], IncomingEmail.TokenPlaceholder) {
+ return fmt.Errorf("%s must appear in the user part of the address (before the @)", IncomingEmail.TokenPlaceholder)
+ }
+
+ return nil
+}
diff --git a/modules/setting/incoming_email_test.go b/modules/setting/incoming_email_test.go
new file mode 100644
index 0000000..0fdd44d
--- /dev/null
+++ b/modules/setting/incoming_email_test.go
@@ -0,0 +1,74 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_loadIncomingEmailFrom(t *testing.T) {
+ makeBaseConfig := func() (ConfigProvider, ConfigSection) {
+ cfg, _ := NewConfigProviderFromData("")
+ sec := cfg.Section("email.incoming")
+ sec.NewKey("ENABLED", "true")
+ sec.NewKey("REPLY_TO_ADDRESS", "forge+%{token}@example.com")
+
+ return cfg, sec
+ }
+ resetIncomingEmailPort := func() func() {
+ return func() {
+ IncomingEmail.Port = 0
+ }
+ }
+
+ t.Run("aliases", func(t *testing.T) {
+ cfg, sec := makeBaseConfig()
+ sec.NewKey("USER", "jane.doe@example.com")
+ sec.NewKey("PASSWD", "y0u'll n3v3r gUess th1S!!1")
+
+ loadIncomingEmailFrom(cfg)
+
+ assert.EqualValues(t, "jane.doe@example.com", IncomingEmail.Username)
+ assert.EqualValues(t, "y0u'll n3v3r gUess th1S!!1", IncomingEmail.Password)
+ })
+
+ t.Run("Port settings", func(t *testing.T) {
+ t.Run("no port, no tls", func(t *testing.T) {
+ defer resetIncomingEmailPort()()
+ cfg, sec := makeBaseConfig()
+
+ // False is the default, but we test it explicitly.
+ sec.NewKey("USE_TLS", "false")
+
+ loadIncomingEmailFrom(cfg)
+
+ assert.EqualValues(t, 143, IncomingEmail.Port)
+ })
+
+ t.Run("no port, with tls", func(t *testing.T) {
+ defer resetIncomingEmailPort()()
+ cfg, sec := makeBaseConfig()
+
+ sec.NewKey("USE_TLS", "true")
+
+ loadIncomingEmailFrom(cfg)
+
+ assert.EqualValues(t, 993, IncomingEmail.Port)
+ })
+
+ t.Run("port overrides tls", func(t *testing.T) {
+ defer resetIncomingEmailPort()()
+ cfg, sec := makeBaseConfig()
+
+ sec.NewKey("PORT", "1993")
+ sec.NewKey("USE_TLS", "true")
+
+ loadIncomingEmailFrom(cfg)
+
+ assert.EqualValues(t, 1993, IncomingEmail.Port)
+ })
+ })
+}
diff --git a/modules/setting/indexer.go b/modules/setting/indexer.go
new file mode 100644
index 0000000..3c96b58
--- /dev/null
+++ b/modules/setting/indexer.go
@@ -0,0 +1,119 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/url"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/gobwas/glob"
+)
+
+// Indexer settings
+var Indexer = struct {
+ IssueType string
+ IssuePath string
+ IssueConnStr string
+ IssueConnAuth string
+ IssueIndexerName string
+ StartupTimeout time.Duration
+
+ RepoIndexerEnabled bool
+ RepoIndexerRepoTypes []string
+ RepoType string
+ RepoPath string
+ RepoConnStr string
+ RepoIndexerName string
+ MaxIndexerFileSize int64
+ IncludePatterns []Glob
+ ExcludePatterns []Glob
+ ExcludeVendored bool
+}{
+ IssueType: "bleve",
+ IssuePath: "indexers/issues.bleve",
+ IssueConnStr: "",
+ IssueConnAuth: "",
+ IssueIndexerName: "gitea_issues",
+
+ RepoIndexerEnabled: false,
+ RepoIndexerRepoTypes: []string{"sources", "forks", "mirrors", "templates"},
+ RepoType: "bleve",
+ RepoPath: "indexers/repos.bleve",
+ RepoConnStr: "",
+ RepoIndexerName: "gitea_codes",
+ MaxIndexerFileSize: 1024 * 1024,
+ ExcludeVendored: true,
+}
+
+type Glob struct {
+ glob glob.Glob
+ pattern string
+}
+
+func (g *Glob) Match(s string) bool {
+ return g.glob.Match(s)
+}
+
+func (g *Glob) Pattern() string {
+ return g.pattern
+}
+
+func loadIndexerFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("indexer")
+ Indexer.IssueType = sec.Key("ISSUE_INDEXER_TYPE").MustString("bleve")
+ Indexer.IssuePath = filepath.ToSlash(sec.Key("ISSUE_INDEXER_PATH").MustString(filepath.ToSlash(filepath.Join(AppDataPath, "indexers/issues.bleve"))))
+ if !filepath.IsAbs(Indexer.IssuePath) {
+ Indexer.IssuePath = filepath.ToSlash(filepath.Join(AppWorkPath, Indexer.IssuePath))
+ }
+ Indexer.IssueConnStr = sec.Key("ISSUE_INDEXER_CONN_STR").MustString(Indexer.IssueConnStr)
+
+ if Indexer.IssueType == "meilisearch" {
+ u, err := url.Parse(Indexer.IssueConnStr)
+ if err != nil {
+ log.Warn("Failed to parse ISSUE_INDEXER_CONN_STR: %v", err)
+ u = &url.URL{}
+ }
+ Indexer.IssueConnAuth, _ = u.User.Password()
+ u.User = nil
+ Indexer.IssueConnStr = u.String()
+ }
+
+ Indexer.IssueIndexerName = sec.Key("ISSUE_INDEXER_NAME").MustString(Indexer.IssueIndexerName)
+
+ Indexer.RepoIndexerEnabled = sec.Key("REPO_INDEXER_ENABLED").MustBool(false)
+ Indexer.RepoIndexerRepoTypes = strings.Split(sec.Key("REPO_INDEXER_REPO_TYPES").MustString("sources,forks,mirrors,templates"), ",")
+ Indexer.RepoType = sec.Key("REPO_INDEXER_TYPE").MustString("bleve")
+ Indexer.RepoPath = filepath.ToSlash(sec.Key("REPO_INDEXER_PATH").MustString(filepath.ToSlash(filepath.Join(AppDataPath, "indexers/repos.bleve"))))
+ if !filepath.IsAbs(Indexer.RepoPath) {
+ Indexer.RepoPath = filepath.ToSlash(filepath.Join(AppWorkPath, Indexer.RepoPath))
+ }
+ Indexer.RepoConnStr = sec.Key("REPO_INDEXER_CONN_STR").MustString("")
+ Indexer.RepoIndexerName = sec.Key("REPO_INDEXER_NAME").MustString("gitea_codes")
+
+ Indexer.IncludePatterns = IndexerGlobFromString(sec.Key("REPO_INDEXER_INCLUDE").MustString(""))
+ Indexer.ExcludePatterns = IndexerGlobFromString(sec.Key("REPO_INDEXER_EXCLUDE").MustString(""))
+ Indexer.ExcludeVendored = sec.Key("REPO_INDEXER_EXCLUDE_VENDORED").MustBool(true)
+ Indexer.MaxIndexerFileSize = sec.Key("MAX_FILE_SIZE").MustInt64(1024 * 1024)
+ Indexer.StartupTimeout = sec.Key("STARTUP_TIMEOUT").MustDuration(30 * time.Second)
+}
+
+// IndexerGlobFromString parses a comma separated list of patterns and returns a glob.Glob slice suited for repo indexing
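+// For example (illustrative): "data/**/*.txt, **.go" yields two globs, a blank
+// or whitespace-only string yields an empty slice, and patterns that fail to
+// compile are skipped with an info log message.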
+func IndexerGlobFromString(globstr string) []Glob {
+ extarr := make([]Glob, 0, 10)
+ for _, expr := range strings.Split(strings.ToLower(globstr), ",") {
+ expr = strings.TrimSpace(expr)
+ if expr != "" {
+ if g, err := glob.Compile(expr, '.', '/'); err != nil {
+ log.Info("Invalid glob expression '%s' (skipped): %v", expr, err)
+ } else {
+ extarr = append(extarr, Glob{glob: g, pattern: expr})
+ }
+ }
+ }
+ return extarr
+}
diff --git a/modules/setting/indexer_test.go b/modules/setting/indexer_test.go
new file mode 100644
index 0000000..8f0437b
--- /dev/null
+++ b/modules/setting/indexer_test.go
@@ -0,0 +1,71 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type indexerMatchList struct {
+ value string
+ position int
+}
+
+func Test_newIndexerGlobSettings(t *testing.T) {
+ checkGlobMatch(t, "", []indexerMatchList{})
+ checkGlobMatch(t, " ", []indexerMatchList{})
+ checkGlobMatch(t, "data, */data, */data/*, **/data/*, **/data/**", []indexerMatchList{
+ {"", -1},
+ {"don't", -1},
+ {"data", 0},
+ {"/data", 1},
+ {"x/data", 1},
+ {"x/data/y", 2},
+ {"a/b/c/data/z", 3},
+ {"a/b/c/data/x/y/z", 4},
+ })
+ checkGlobMatch(t, "*.txt, txt, **.txt, **txt, **txt*", []indexerMatchList{
+ {"my.txt", 0},
+ {"don't", -1},
+ {"mytxt", 3},
+ {"/data/my.txt", 2},
+ {"data/my.txt", 2},
+ {"data/txt", 3},
+ {"data/thistxtfile", 4},
+ {"/data/thistxtfile", 4},
+ })
+ checkGlobMatch(t, "data/**/*.txt, data/**.txt", []indexerMatchList{
+ {"data/a/b/c/d.txt", 0},
+ {"data/a.txt", 1},
+ })
+ checkGlobMatch(t, "**/*.txt, data/**.txt", []indexerMatchList{
+ {"data/a/b/c/d.txt", 0},
+ {"data/a.txt", 0},
+ {"a.txt", -1},
+ })
+}
+
+func checkGlobMatch(t *testing.T, globstr string, list []indexerMatchList) {
+ glist := IndexerGlobFromString(globstr)
+ if len(list) == 0 {
+ assert.Empty(t, glist)
+ return
+ }
+ assert.NotEmpty(t, glist)
+ for _, m := range list {
+ found := false
+ for pos, g := range glist {
+ if g.Match(m.value) {
+ assert.Equal(t, m.position, pos, "Test string `%s` doesn't match `%s`@%d, but matches @%d", m.value, globstr, m.position, pos)
+ found = true
+ break
+ }
+ }
+ if !found {
+ assert.Equal(t, m.position, -1, "Test string `%s` doesn't match `%s` anywhere; expected @%d", m.value, globstr, m.position)
+ }
+ }
+}
diff --git a/modules/setting/lfs.go b/modules/setting/lfs.go
new file mode 100644
index 0000000..7501017
--- /dev/null
+++ b/modules/setting/lfs.go
@@ -0,0 +1,82 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "time"
+
+ "code.gitea.io/gitea/modules/generate"
+)
+
+// LFS represents the configuration for Git LFS
+var LFS = struct {
+ StartServer bool `ini:"LFS_START_SERVER"`
+ JWTSecretBytes []byte `ini:"-"`
+ HTTPAuthExpiry time.Duration `ini:"LFS_HTTP_AUTH_EXPIRY"`
+ MaxFileSize int64 `ini:"LFS_MAX_FILE_SIZE"`
+ LocksPagingNum int `ini:"LFS_LOCKS_PAGING_NUM"`
+
+ Storage *Storage
+}{}
+
+func loadLFSFrom(rootCfg ConfigProvider) error {
+ sec := rootCfg.Section("server")
+ if err := sec.MapTo(&LFS); err != nil {
+ return fmt.Errorf("failed to map LFS settings: %v", err)
+ }
+
+ lfsSec, _ := rootCfg.GetSection("lfs")
+
+ // Specifically default PATH to LFS_CONTENT_PATH
+ // DEPRECATED: this fallback must not be removed, because users may upgrade from an older version to the latest one;
+ // if it were removed, the deprecation warning would no longer be shown
+ deprecatedSetting(rootCfg, "server", "LFS_CONTENT_PATH", "lfs", "PATH", "v1.19.0")
+
+ if val := sec.Key("LFS_CONTENT_PATH").String(); val != "" {
+ if lfsSec == nil {
+ lfsSec = rootCfg.Section("lfs")
+ }
+ lfsSec.Key("PATH").MustString(val)
+ }
+
+ var err error
+ LFS.Storage, err = getStorage(rootCfg, "lfs", "", lfsSec)
+ if err != nil {
+ return err
+ }
+
+ // Rest of LFS service settings
+ if LFS.LocksPagingNum == 0 {
+ LFS.LocksPagingNum = 50
+ }
+
+ LFS.HTTPAuthExpiry = sec.Key("LFS_HTTP_AUTH_EXPIRY").MustDuration(24 * time.Hour)
+
+ if !LFS.StartServer || !InstallLock {
+ return nil
+ }
+
+ jwtSecretBase64 := loadSecret(rootCfg.Section("server"), "LFS_JWT_SECRET_URI", "LFS_JWT_SECRET")
+ LFS.JWTSecretBytes, err = generate.DecodeJwtSecret(jwtSecretBase64)
+ if err != nil {
+ LFS.JWTSecretBytes, jwtSecretBase64, err = generate.NewJwtSecret()
+ if err != nil {
+ return fmt.Errorf("error generating JWT Secret for custom config: %v", err)
+ }
+
+ // Save secret
+ saveCfg, err := rootCfg.PrepareSaving()
+ if err != nil {
+ return fmt.Errorf("error saving JWT Secret for custom config: %v", err)
+ }
+ rootCfg.Section("server").Key("LFS_JWT_SECRET").SetValue(jwtSecretBase64)
+ saveCfg.Section("server").Key("LFS_JWT_SECRET").SetValue(jwtSecretBase64)
+ if err := saveCfg.Save(); err != nil {
+ return fmt.Errorf("error saving JWT Secret for custom config: %v", err)
+ }
+ }
+
+ return nil
+}
diff --git a/modules/setting/lfs_test.go b/modules/setting/lfs_test.go
new file mode 100644
index 0000000..c7f1637
--- /dev/null
+++ b/modules/setting/lfs_test.go
@@ -0,0 +1,102 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getStorageInheritNameSectionTypeForLFS(t *testing.T) {
+ iniStr := `
+ [storage]
+ STORAGE_TYPE = minio
+ `
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "minio", LFS.Storage.Type)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+
+ iniStr = `
+[server]
+LFS_CONTENT_PATH = path_ignored
+[lfs]
+PATH = path_used
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "local", LFS.Storage.Type)
+ assert.Contains(t, LFS.Storage.Path, "path_used")
+
+ iniStr = `
+[server]
+LFS_CONTENT_PATH = deprecatedpath
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "local", LFS.Storage.Type)
+ assert.Contains(t, LFS.Storage.Path, "deprecatedpath")
+
+ iniStr = `
+[storage.lfs]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "minio", LFS.Storage.Type)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+
+ iniStr = `
+[lfs]
+STORAGE_TYPE = my_minio
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "minio", LFS.Storage.Type)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+
+ iniStr = `
+[lfs]
+STORAGE_TYPE = my_minio
+MINIO_BASE_PATH = my_lfs/
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+
+ assert.EqualValues(t, "minio", LFS.Storage.Type)
+ assert.EqualValues(t, "my_lfs/", LFS.Storage.MinioConfig.BasePath)
+}
+
+func Test_LFSStorage1(t *testing.T) {
+ iniStr := `
+[storage]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadLFSFrom(cfg))
+ assert.EqualValues(t, "minio", LFS.Storage.Type)
+ assert.EqualValues(t, "gitea", LFS.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+}
diff --git a/modules/setting/log.go b/modules/setting/log.go
new file mode 100644
index 0000000..a141188
--- /dev/null
+++ b/modules/setting/log.go
@@ -0,0 +1,270 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ golog "log"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+)
+
+type LogGlobalConfig struct {
+ RootPath string
+
+ Mode string
+ Level log.Level
+ StacktraceLogLevel log.Level
+ BufferLen int
+
+ EnableSSHLog bool
+
+ AccessLogTemplate string
+ RequestIDHeaders []string
+}
+
+var Log LogGlobalConfig
+
+const accessLogTemplateDefault = `{{.Ctx.RemoteHost}} - {{.Identity}} {{.Start.Format "[02/Jan/2006:15:04:05 -0700]" }} "{{.Ctx.Req.Method}} {{.Ctx.Req.URL.RequestURI}} {{.Ctx.Req.Proto}}" {{.ResponseWriter.Status}} {{.ResponseWriter.Size}} "{{.Ctx.Req.Referer}}" "{{.Ctx.Req.UserAgent}}"`
+
+func loadLogGlobalFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("log")
+
+ Log.Level = log.LevelFromString(sec.Key("LEVEL").MustString(log.INFO.String()))
+ Log.StacktraceLogLevel = log.LevelFromString(sec.Key("STACKTRACE_LEVEL").MustString(log.NONE.String()))
+ Log.BufferLen = sec.Key("BUFFER_LEN").MustInt(10000)
+ Log.Mode = sec.Key("MODE").MustString("console")
+
+ Log.RootPath = sec.Key("ROOT_PATH").MustString(path.Join(AppWorkPath, "log"))
+ if !filepath.IsAbs(Log.RootPath) {
+ Log.RootPath = filepath.Join(AppWorkPath, Log.RootPath)
+ }
+ Log.RootPath = util.FilePathJoinAbs(Log.RootPath)
+
+ Log.EnableSSHLog = sec.Key("ENABLE_SSH_LOG").MustBool(false)
+
+ Log.AccessLogTemplate = sec.Key("ACCESS_LOG_TEMPLATE").MustString(accessLogTemplateDefault)
+ Log.RequestIDHeaders = sec.Key("REQUEST_ID_HEADERS").Strings(",")
+}
+
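+// prepareLoggerConfig translates legacy [log] keys into the newer
+// "logger.<name>.MODE" form. For example (illustrative), the deprecated
+// `DISABLE_ROUTER_LOG = true` results in an empty `logger.router.MODE`, which
+// disables the router logger entirely.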
+func prepareLoggerConfig(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("log")
+
+ if !sec.HasKey("logger.default.MODE") {
+ sec.Key("logger.default.MODE").MustString(",")
+ }
+
+ deprecatedSetting(rootCfg, "log", "ACCESS", "log", "logger.access.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "ENABLE_ACCESS_LOG", "log", "logger.access.MODE", "1.21")
+ if val := sec.Key("ACCESS").String(); val != "" {
+ sec.Key("logger.access.MODE").MustString(val)
+ }
+ if sec.HasKey("ENABLE_ACCESS_LOG") && !sec.Key("ENABLE_ACCESS_LOG").MustBool() {
+ sec.Key("logger.access.MODE").SetValue("")
+ }
+
+ deprecatedSetting(rootCfg, "log", "ROUTER", "log", "logger.router.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "DISABLE_ROUTER_LOG", "log", "logger.router.MODE", "1.21")
+ if val := sec.Key("ROUTER").String(); val != "" {
+ sec.Key("logger.router.MODE").MustString(val)
+ }
+ if !sec.HasKey("logger.router.MODE") {
+ sec.Key("logger.router.MODE").MustString(",") // use default logger
+ }
+ if sec.HasKey("DISABLE_ROUTER_LOG") && sec.Key("DISABLE_ROUTER_LOG").MustBool() {
+ sec.Key("logger.router.MODE").SetValue("")
+ }
+
+ deprecatedSetting(rootCfg, "log", "XORM", "log", "logger.xorm.MODE", "1.21")
+ deprecatedSetting(rootCfg, "log", "ENABLE_XORM_LOG", "log", "logger.xorm.MODE", "1.21")
+ if val := sec.Key("XORM").String(); val != "" {
+ sec.Key("logger.xorm.MODE").MustString(val)
+ }
+ if !sec.HasKey("logger.xorm.MODE") {
+ sec.Key("logger.xorm.MODE").MustString(",") // use default logger
+ }
+ if sec.HasKey("ENABLE_XORM_LOG") && !sec.Key("ENABLE_XORM_LOG").MustBool() {
+ sec.Key("logger.xorm.MODE").SetValue("")
+ }
+}
+
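+// LogPrepareFilenameForWriter resolves a writer's log file name relative to
+// Log.RootPath and ensures its directory exists. For example (illustrative),
+// with ROOT_PATH = /var/log/forgejo a FILE_NAME of "access.log" becomes
+// "/var/log/forgejo/access.log", while an absolute FILE_NAME is only cleaned.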
+func LogPrepareFilenameForWriter(fileName, defaultFileName string) string {
+ if fileName == "" {
+ fileName = defaultFileName
+ }
+ if !filepath.IsAbs(fileName) {
+ fileName = filepath.Join(Log.RootPath, fileName)
+ } else {
+ fileName = filepath.Clean(fileName)
+ }
+ if err := os.MkdirAll(filepath.Dir(fileName), os.ModePerm); err != nil {
+ panic(fmt.Sprintf("unable to create directory for log %q: %v", fileName, err.Error()))
+ }
+ return fileName
+}
+
+func loadLogModeByName(rootCfg ConfigProvider, loggerName, modeName string) (writerName, writerType string, writerMode log.WriterMode, err error) {
+ sec := rootCfg.Section("log." + modeName)
+
+ writerMode = log.WriterMode{}
+ writerType = ConfigSectionKeyString(sec, "MODE")
+ if writerType == "" {
+ writerType = modeName
+ }
+
+ writerName = modeName
+ defaultFlags := "stdflags"
+ defaultFileName := "gitea.log"
+ if loggerName == "access" {
+ // "access" logger is special, by default it doesn't have output flags, so it also needs a new writer name to avoid conflicting with other writers.
+ // so "access" logger's writer name is usually "file.access" or "console.access"
+ writerName += ".access"
+ defaultFlags = "none"
+ defaultFileName = "access.log"
+ }
+
+ writerMode.Level = log.LevelFromString(ConfigInheritedKeyString(sec, "LEVEL", Log.Level.String()))
+ writerMode.StacktraceLevel = log.LevelFromString(ConfigInheritedKeyString(sec, "STACKTRACE_LEVEL", Log.StacktraceLogLevel.String()))
+ writerMode.Prefix = ConfigInheritedKeyString(sec, "PREFIX")
+ writerMode.Expression = ConfigInheritedKeyString(sec, "EXPRESSION")
+ // flags are updated and set below
+
+ switch writerType {
+ case "console":
+ // if stderr is on journald, prefer stderr by default
+ useStderr := ConfigInheritedKey(sec, "STDERR").MustBool(log.JournaldOnStderr)
+ defaultCanColor := log.CanColorStdout
+ defaultJournald := log.JournaldOnStdout
+ if useStderr {
+ defaultCanColor = log.CanColorStderr
+ defaultJournald = log.JournaldOnStderr
+ }
+ writerOption := log.WriterConsoleOption{Stderr: useStderr}
+ writerMode.Colorize = ConfigInheritedKey(sec, "COLORIZE").MustBool(defaultCanColor)
+ writerMode.WriterOption = writerOption
+ // if we are ultimately on journald, update default flags
+ if defaultJournald {
+ defaultFlags = "journaldflags"
+ }
+ case "file":
+ fileName := LogPrepareFilenameForWriter(ConfigInheritedKey(sec, "FILE_NAME").String(), defaultFileName)
+ writerOption := log.WriterFileOption{}
+ writerOption.FileName = fileName + filenameSuffix // FIXME: the suffix doesn't seem right, see its related comments
+ writerOption.LogRotate = ConfigInheritedKey(sec, "LOG_ROTATE").MustBool(true)
+ writerOption.MaxSize = 1 << uint(ConfigInheritedKey(sec, "MAX_SIZE_SHIFT").MustInt(28))
+ writerOption.DailyRotate = ConfigInheritedKey(sec, "DAILY_ROTATE").MustBool(true)
+ writerOption.MaxDays = ConfigInheritedKey(sec, "MAX_DAYS").MustInt(7)
+ writerOption.Compress = ConfigInheritedKey(sec, "COMPRESS").MustBool(true)
+ writerOption.CompressionLevel = ConfigInheritedKey(sec, "COMPRESSION_LEVEL").MustInt(-1)
+ writerMode.WriterOption = writerOption
+ case "conn":
+ writerOption := log.WriterConnOption{}
+ writerOption.ReconnectOnMsg = ConfigInheritedKey(sec, "RECONNECT_ON_MSG").MustBool()
+ writerOption.Reconnect = ConfigInheritedKey(sec, "RECONNECT").MustBool()
+ writerOption.Protocol = ConfigInheritedKey(sec, "PROTOCOL").In("tcp", []string{"tcp", "unix", "udp"})
+ writerOption.Addr = ConfigInheritedKey(sec, "ADDR").MustString(":7020")
+ writerMode.WriterOption = writerOption
+ default:
+ if !log.HasEventWriter(writerType) {
+ return "", "", writerMode, fmt.Errorf("invalid log writer type (mode): %s, maybe it needs something like 'MODE=file' in [log.%s] section", writerType, modeName)
+ }
+ }
+
+ // set flags last because the console writer code may update default flags
+ writerMode.Flags = log.FlagsFromString(ConfigInheritedKeyString(sec, "FLAGS", defaultFlags))
+
+ return writerName, writerType, writerMode, nil
+}
+
+var filenameSuffix = ""
+
+// RestartLogsWithPIDSuffix restarts the logs with a PID suffix on files
+// FIXME: it seems not right, it breaks log rotating or log collectors
+func RestartLogsWithPIDSuffix() {
+ filenameSuffix = fmt.Sprintf(".%d", os.Getpid())
+ initAllLoggers() // when forking, before restarting, rename logger file and re-init all loggers
+}
+
+func InitLoggersForTest() {
+ initAllLoggers()
+}
+
+// initAllLoggers creates all the log services
+func initAllLoggers() {
+ initManagedLoggers(log.GetManager(), CfgProvider)
+
+ golog.SetFlags(0)
+ golog.SetPrefix("")
+ golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
+}
+
+func initManagedLoggers(manager *log.LoggerManager, cfg ConfigProvider) {
+ loadLogGlobalFrom(cfg)
+ prepareLoggerConfig(cfg)
+
+ initLoggerByName(manager, cfg, log.DEFAULT) // default
+ initLoggerByName(manager, cfg, "access")
+ initLoggerByName(manager, cfg, "router")
+ initLoggerByName(manager, cfg, "xorm")
+}
+
+func initLoggerByName(manager *log.LoggerManager, rootCfg ConfigProvider, loggerName string) {
+ sec := rootCfg.Section("log")
+ keyPrefix := "logger." + loggerName
+
+ disabled := sec.HasKey(keyPrefix+".MODE") && sec.Key(keyPrefix+".MODE").String() == ""
+ if disabled {
+ return
+ }
+
+ modeVal := sec.Key(keyPrefix + ".MODE").String()
+ if modeVal == "," {
+ modeVal = Log.Mode
+ }
+
+ var eventWriters []log.EventWriter
+ modes := strings.Split(modeVal, ",")
+ for _, modeName := range modes {
+ modeName = strings.TrimSpace(modeName)
+ if modeName == "" {
+ continue
+ }
+ writerName, writerType, writerMode, err := loadLogModeByName(rootCfg, loggerName, modeName)
+ if err != nil {
+ log.FallbackErrorf("Failed to load writer mode %q for logger %s: %v", modeName, loggerName, err)
+ continue
+ }
+ if writerMode.BufferLen == 0 {
+ writerMode.BufferLen = Log.BufferLen
+ }
+ eventWriter := manager.GetSharedWriter(writerName)
+ if eventWriter == nil {
+ eventWriter, err = manager.NewSharedWriter(writerName, writerType, writerMode)
+ if err != nil {
+ log.FallbackErrorf("Failed to create event writer for logger %s: %v", loggerName, err)
+ continue
+ }
+ }
+ eventWriters = append(eventWriters, eventWriter)
+ }
+
+ manager.GetLogger(loggerName).ReplaceAllWriters(eventWriters...)
+}
+
+func InitSQLLoggersForCli(level log.Level) {
+ log.SetConsoleLogger("xorm", "console", level)
+}
+
+func IsAccessLogEnabled() bool {
+ return log.IsLoggerEnabled("access")
+}
+
+func IsRouteLogEnabled() bool {
+ return log.IsLoggerEnabled("router")
+}
diff --git a/modules/setting/log_test.go b/modules/setting/log_test.go
new file mode 100644
index 0000000..3134d3e
--- /dev/null
+++ b/modules/setting/log_test.go
@@ -0,0 +1,386 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+
+ "github.com/stretchr/testify/require"
+)
+
+func initLoggersByConfig(t *testing.T, config string) (*log.LoggerManager, func()) {
+ oldLogConfig := Log
+ Log = LogGlobalConfig{}
+ defer func() {
+ Log = oldLogConfig
+ }()
+
+ cfg, err := NewConfigProviderFromData(config)
+ require.NoError(t, err)
+
+ manager := log.NewManager()
+ initManagedLoggers(manager, cfg)
+ return manager, manager.Close
+}
+
+func toJSON(v any) string {
+ b, _ := json.MarshalIndent(v, "", "\t")
+ return string(b)
+}
+
+func TestLogConfigDefault(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, ``)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+}
+
+func TestLogConfigDisable(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+logger.router.MODE =
+logger.xorm.MODE =
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+}
+
+func TestLogConfigLegacyDefault(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+MODE = console
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+}
+
+func TestLogConfigLegacyMode(t *testing.T) {
+ tempDir := t.TempDir()
+
+ tempPath := func(file string) string {
+ return filepath.Join(tempDir, file)
+ }
+
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROOT_PATH = `+tempDir+`
+MODE = file
+ROUTER = file
+ACCESS = file
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "file": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ }
+}
+`
+ writerDumpAccess := `
+{
+ "file.access": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "none",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ }
+}
+`
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDump, "$FILENAME", tempPath("gitea.log")), toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDumpAccess, "$FILENAME", tempPath("access.log")), toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, strings.ReplaceAll(writerDump, "$FILENAME", tempPath("gitea.log")), toJSON(dump))
+}
+
+func TestLogConfigLegacyModeDisable(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROUTER = file
+ACCESS = file
+DISABLE_ROUTER_LOG = true
+ENABLE_ACCESS_LOG = false
+`)
+ defer managerClose()
+
+ dump := manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+
+ dump = manager.GetLogger("router").DumpWriters()
+ require.JSONEq(t, "{}", toJSON(dump))
+}
+
+func TestLogConfigNewConfig(t *testing.T) {
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+logger.access.MODE = console
+logger.xorm.MODE = console, console-1
+
+[log.console]
+LEVEL = warn
+
+[log.console-1]
+MODE = console
+LEVEL = error
+STDERR = true
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "console": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "warn",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ },
+ "console-1": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "error",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": true
+ },
+ "WriterType": "console"
+ }
+}
+`
+ writerDumpAccess := `
+{
+ "console.access": {
+ "BufferLen": 10000,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "none",
+ "Level": "warn",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Stderr": false
+ },
+ "WriterType": "console"
+ }
+}
+`
+ dump := manager.GetLogger("xorm").DumpWriters()
+ require.JSONEq(t, writerDump, toJSON(dump))
+
+ dump = manager.GetLogger("access").DumpWriters()
+ require.JSONEq(t, writerDumpAccess, toJSON(dump))
+}
+
+func TestLogConfigModeFile(t *testing.T) {
+ tempDir := t.TempDir()
+
+ tempPath := func(file string) string {
+ return filepath.Join(tempDir, file)
+ }
+
+ manager, managerClose := initLoggersByConfig(t, `
+[log]
+ROOT_PATH = `+tempDir+`
+BUFFER_LEN = 10
+MODE = file, file1
+
+[log.file1]
+MODE = file
+LEVEL = error
+STACKTRACE_LEVEL = fatal
+EXPRESSION = filter
+FLAGS = medfile
+PREFIX = "[Prefix] "
+FILE_NAME = file-xxx.log
+LOG_ROTATE = false
+MAX_SIZE_SHIFT = 1
+DAILY_ROTATE = false
+MAX_DAYS = 90
+COMPRESS = false
+COMPRESSION_LEVEL = 4
+`)
+ defer managerClose()
+
+ writerDump := `
+{
+ "file": {
+ "BufferLen": 10,
+ "Colorize": false,
+ "Expression": "",
+ "Flags": "stdflags",
+ "Level": "info",
+ "Prefix": "",
+ "StacktraceLevel": "none",
+ "WriterOption": {
+ "Compress": true,
+ "CompressionLevel": -1,
+ "DailyRotate": true,
+ "FileName": "$FILENAME-0",
+ "LogRotate": true,
+ "MaxDays": 7,
+ "MaxSize": 268435456
+ },
+ "WriterType": "file"
+ },
+ "file1": {
+ "BufferLen": 10,
+ "Colorize": false,
+ "Expression": "filter",
+ "Flags": "medfile",
+ "Level": "error",
+ "Prefix": "[Prefix] ",
+ "StacktraceLevel": "fatal",
+ "WriterOption": {
+ "Compress": false,
+ "CompressionLevel": 4,
+ "DailyRotate": false,
+ "FileName": "$FILENAME-1",
+ "LogRotate": false,
+ "MaxDays": 90,
+ "MaxSize": 2
+ },
+ "WriterType": "file"
+ }
+}
+`
+
+ dump := manager.GetLogger(log.DEFAULT).DumpWriters()
+ expected := writerDump
+ expected = strings.ReplaceAll(expected, "$FILENAME-0", tempPath("gitea.log"))
+ expected = strings.ReplaceAll(expected, "$FILENAME-1", tempPath("file-xxx.log"))
+ require.JSONEq(t, expected, toJSON(dump))
+}
diff --git a/modules/setting/mailer.go b/modules/setting/mailer.go
new file mode 100644
index 0000000..136d932
--- /dev/null
+++ b/modules/setting/mailer.go
@@ -0,0 +1,309 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "context"
+ "net"
+ "net/mail"
+ "strings"
+ "text/template"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+
+ shellquote "github.com/kballard/go-shellquote"
+)
+
+// Mailer represents the mail service configuration.
+type Mailer struct {
+ // Mailer
+ Name string `ini:"NAME"`
+ From string `ini:"FROM"`
+ EnvelopeFrom string `ini:"ENVELOPE_FROM"`
+ OverrideEnvelopeFrom bool `ini:"-"`
+ FromName string `ini:"-"`
+ FromEmail string `ini:"-"`
+ SendAsPlainText bool `ini:"SEND_AS_PLAIN_TEXT"`
+ SubjectPrefix string `ini:"SUBJECT_PREFIX"`
+ OverrideHeader map[string][]string `ini:"-"`
+
+ // SMTP sender
+ Protocol string `ini:"PROTOCOL"`
+ SMTPAddr string `ini:"SMTP_ADDR"`
+ SMTPPort string `ini:"SMTP_PORT"`
+ User string `ini:"USER"`
+ Passwd string `ini:"PASSWD"`
+ EnableHelo bool `ini:"ENABLE_HELO"`
+ HeloHostname string `ini:"HELO_HOSTNAME"`
+ ForceTrustServerCert bool `ini:"FORCE_TRUST_SERVER_CERT"`
+ UseClientCert bool `ini:"USE_CLIENT_CERT"`
+ ClientCertFile string `ini:"CLIENT_CERT_FILE"`
+ ClientKeyFile string `ini:"CLIENT_KEY_FILE"`
+
+ // Sendmail sender
+ SendmailPath string `ini:"SENDMAIL_PATH"`
+ SendmailArgs []string `ini:"-"`
+ SendmailTimeout time.Duration `ini:"SENDMAIL_TIMEOUT"`
+ SendmailConvertCRLF bool `ini:"SENDMAIL_CONVERT_CRLF"`
+
+ // Customization
+ FromDisplayNameFormat string `ini:"FROM_DISPLAY_NAME_FORMAT"`
+ FromDisplayNameFormatTemplate *template.Template `ini:"-"`
+}
+
+// MailService is the global mailer configuration
+var MailService *Mailer
+
+func loadMailsFrom(rootCfg ConfigProvider) {
+ loadMailerFrom(rootCfg)
+ loadRegisterMailFrom(rootCfg)
+ loadNotifyMailFrom(rootCfg)
+ loadIncomingEmailFrom(rootCfg)
+}
+
+func loadMailerFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("mailer")
+ // Check mailer setting.
+ if !sec.Key("ENABLED").MustBool() {
+ return
+ }
+
+	// Handle deprecations and map them onto the new configuration
+	// DEPRECATED settings should not be removed because users may upgrade from an older version to the latest version;
+	// if they are removed, the deprecation warning will not be shown
+ deprecatedSetting(rootCfg, "mailer", "MAILER_TYPE", "mailer", "PROTOCOL", "v1.19.0")
+ if sec.HasKey("MAILER_TYPE") && !sec.HasKey("PROTOCOL") {
+ if sec.Key("MAILER_TYPE").String() == "sendmail" {
+ sec.Key("PROTOCOL").MustString("sendmail")
+ }
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "HOST", "mailer", "SMTP_ADDR", "v1.19.0")
+ if sec.HasKey("HOST") && !sec.HasKey("SMTP_ADDR") {
+ givenHost := sec.Key("HOST").String()
+ addr, port, err := net.SplitHostPort(givenHost)
+ if err != nil && strings.Contains(err.Error(), "missing port in address") {
+ addr = givenHost
+ } else if err != nil {
+ log.Fatal("Invalid mailer.HOST (%s): %v", givenHost, err)
+ }
+ if addr == "" {
+ addr = "127.0.0.1"
+ }
+ sec.Key("SMTP_ADDR").MustString(addr)
+ sec.Key("SMTP_PORT").MustString(port)
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "IS_TLS_ENABLED", "mailer", "PROTOCOL", "v1.19.0")
+ if sec.HasKey("IS_TLS_ENABLED") && !sec.HasKey("PROTOCOL") {
+ if sec.Key("IS_TLS_ENABLED").MustBool() {
+ sec.Key("PROTOCOL").MustString("smtps")
+ } else {
+ sec.Key("PROTOCOL").MustString("smtp+starttls")
+ }
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "DISABLE_HELO", "mailer", "ENABLE_HELO", "v1.19.0")
+ if sec.HasKey("DISABLE_HELO") && !sec.HasKey("ENABLE_HELO") {
+ sec.Key("ENABLE_HELO").MustBool(!sec.Key("DISABLE_HELO").MustBool())
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "SKIP_VERIFY", "mailer", "FORCE_TRUST_SERVER_CERT", "v1.19.0")
+ if sec.HasKey("SKIP_VERIFY") && !sec.HasKey("FORCE_TRUST_SERVER_CERT") {
+ sec.Key("FORCE_TRUST_SERVER_CERT").MustBool(sec.Key("SKIP_VERIFY").MustBool())
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "USE_CERTIFICATE", "mailer", "USE_CLIENT_CERT", "v1.19.0")
+ if sec.HasKey("USE_CERTIFICATE") && !sec.HasKey("USE_CLIENT_CERT") {
+ sec.Key("USE_CLIENT_CERT").MustBool(sec.Key("USE_CERTIFICATE").MustBool())
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "CERT_FILE", "mailer", "CLIENT_CERT_FILE", "v1.19.0")
+ if sec.HasKey("CERT_FILE") && !sec.HasKey("CLIENT_CERT_FILE") {
+		sec.Key("CLIENT_CERT_FILE").MustString(sec.Key("CERT_FILE").String())
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "KEY_FILE", "mailer", "CLIENT_KEY_FILE", "v1.19.0")
+ if sec.HasKey("KEY_FILE") && !sec.HasKey("CLIENT_KEY_FILE") {
+		sec.Key("CLIENT_KEY_FILE").MustString(sec.Key("KEY_FILE").String())
+ }
+
+ deprecatedSetting(rootCfg, "mailer", "ENABLE_HTML_ALTERNATIVE", "mailer", "SEND_AS_PLAIN_TEXT", "v1.19.0")
+ if sec.HasKey("ENABLE_HTML_ALTERNATIVE") && !sec.HasKey("SEND_AS_PLAIN_TEXT") {
+ sec.Key("SEND_AS_PLAIN_TEXT").MustBool(!sec.Key("ENABLE_HTML_ALTERNATIVE").MustBool(false))
+ }
+
+ if sec.HasKey("PROTOCOL") && sec.Key("PROTOCOL").String() == "smtp+startls" {
+		log.Error("Deprecated fallback `[mailer]` `PROTOCOL = smtp+startls` present. Use `[mailer]` `PROTOCOL = smtp+starttls` instead. This fallback will be removed in v1.19.0")
+ sec.Key("PROTOCOL").SetValue("smtp+starttls")
+ }
+
+ // Handle aliases
+ if sec.HasKey("USERNAME") && !sec.HasKey("USER") {
+ sec.Key("USER").SetValue(sec.Key("USERNAME").String())
+ }
+ if sec.HasKey("PASSWORD") && !sec.HasKey("PASSWD") {
+ sec.Key("PASSWD").SetValue(sec.Key("PASSWORD").String())
+ }
+
+ // Set default values & validate
+ sec.Key("NAME").MustString(AppName)
+ sec.Key("PROTOCOL").In("", []string{"smtp", "smtps", "smtp+starttls", "smtp+unix", "sendmail", "dummy"})
+ sec.Key("ENABLE_HELO").MustBool(true)
+ sec.Key("FORCE_TRUST_SERVER_CERT").MustBool(false)
+ sec.Key("USE_CLIENT_CERT").MustBool(false)
+ sec.Key("SENDMAIL_PATH").MustString("sendmail")
+ sec.Key("SENDMAIL_TIMEOUT").MustDuration(5 * time.Minute)
+ sec.Key("SENDMAIL_CONVERT_CRLF").MustBool(true)
+ sec.Key("FROM").MustString(sec.Key("USER").String())
+
+ // Now map the values on to the MailService
+ MailService = &Mailer{}
+ if err := sec.MapTo(MailService); err != nil {
+ log.Fatal("Unable to map [mailer] section on to MailService. Error: %v", err)
+ }
+
+ overrideHeader := rootCfg.Section("mailer.override_header").Keys()
+ MailService.OverrideHeader = make(map[string][]string)
+ for _, key := range overrideHeader {
+ MailService.OverrideHeader[key.Name()] = key.Strings(",")
+ }
+
+ // Infer SMTPPort if not set
+ if MailService.SMTPPort == "" {
+ switch MailService.Protocol {
+ case "smtp":
+ MailService.SMTPPort = "25"
+ case "smtps":
+ MailService.SMTPPort = "465"
+ case "smtp+starttls":
+ MailService.SMTPPort = "587"
+ }
+ }
+
+ // Infer Protocol
+ if MailService.Protocol == "" {
+ if strings.ContainsAny(MailService.SMTPAddr, "/\\") {
+ MailService.Protocol = "smtp+unix"
+ } else {
+ switch MailService.SMTPPort {
+ case "25":
+ MailService.Protocol = "smtp"
+ case "465":
+ MailService.Protocol = "smtps"
+ case "587":
+ MailService.Protocol = "smtp+starttls"
+ default:
+				log.Error("unable to infer unspecified mailer.PROTOCOL from mailer.SMTP_PORT = %q, assuming smtps", MailService.SMTPPort)
+ MailService.Protocol = "smtps"
+ if MailService.SMTPPort == "" {
+ MailService.SMTPPort = "465"
+ }
+ }
+ }
+ }
+
+ // we want to warn if users use SMTP on a non-local IP;
+ // we might as well take the opportunity to check that it has an IP at all
+ // This check is not needed for sendmail
+ switch MailService.Protocol {
+ case "sendmail":
+ var err error
+ MailService.SendmailArgs, err = shellquote.Split(sec.Key("SENDMAIL_ARGS").String())
+ if err != nil {
+ log.Error("Failed to parse Sendmail args: '%s' with error %v", sec.Key("SENDMAIL_ARGS").String(), err)
+ }
+ case "smtp", "smtps", "smtp+starttls", "smtp+unix":
+ ips := tryResolveAddr(MailService.SMTPAddr)
+ if MailService.Protocol == "smtp" {
+ for _, ip := range ips {
+ if !ip.IP.IsLoopback() {
+ log.Warn("connecting over insecure SMTP protocol to non-local address is not recommended")
+ break
+ }
+ }
+ }
+ case "dummy": // just mention and do nothing
+ }
+
+ if MailService.From != "" {
+ parsed, err := mail.ParseAddress(MailService.From)
+ if err != nil {
+ log.Fatal("Invalid mailer.FROM (%s): %v", MailService.From, err)
+ }
+ MailService.FromName = parsed.Name
+ MailService.FromEmail = parsed.Address
+ } else {
+ log.Error("no mailer.FROM provided, email system may not work.")
+ }
+
+ MailService.FromDisplayNameFormatTemplate, _ = template.New("mailFrom").Parse("{{ .DisplayName }}")
+ if MailService.FromDisplayNameFormat != "" {
+		tmpl, err := template.New("mailFrom").Parse(MailService.FromDisplayNameFormat)
+		if err != nil {
+			log.Error("mailer.FROM_DISPLAY_NAME_FORMAT is not a valid template: %v", err)
+		} else {
+			MailService.FromDisplayNameFormatTemplate = tmpl
+		}
+ }
+
+ switch MailService.EnvelopeFrom {
+ case "":
+ MailService.OverrideEnvelopeFrom = false
+ case "<>":
+ MailService.EnvelopeFrom = ""
+ MailService.OverrideEnvelopeFrom = true
+ default:
+ parsed, err := mail.ParseAddress(MailService.EnvelopeFrom)
+ if err != nil {
+ log.Fatal("Invalid mailer.ENVELOPE_FROM (%s): %v", MailService.EnvelopeFrom, err)
+ }
+ MailService.OverrideEnvelopeFrom = true
+ MailService.EnvelopeFrom = parsed.Address
+ }
+
+ log.Info("Mail Service Enabled")
+}
+
+func loadRegisterMailFrom(rootCfg ConfigProvider) {
+ if !rootCfg.Section("service").Key("REGISTER_EMAIL_CONFIRM").MustBool() {
+ return
+ } else if MailService == nil {
+ log.Warn("Register Mail Service: Mail Service is not enabled")
+ return
+ }
+ Service.RegisterEmailConfirm = true
+ log.Info("Register Mail Service Enabled")
+}
+
+func loadNotifyMailFrom(rootCfg ConfigProvider) {
+ if !rootCfg.Section("service").Key("ENABLE_NOTIFY_MAIL").MustBool() {
+ return
+ } else if MailService == nil {
+ log.Warn("Notify Mail Service: Mail Service is not enabled")
+ return
+ }
+ Service.EnableNotifyMail = true
+ log.Info("Notify Mail Service Enabled")
+}
+
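+// tryResolveAddr interprets addr either as a literal IP (optionally wrapped in brackets) or as a
+// hostname that is resolved with a 2 second timeout; it returns nil if resolution fails.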
+func tryResolveAddr(addr string) []net.IPAddr {
+ if strings.HasPrefix(addr, "[") && strings.HasSuffix(addr, "]") {
+ addr = addr[1 : len(addr)-1]
+ }
+ ip := net.ParseIP(addr)
+ if ip != nil {
+ return []net.IPAddr{{IP: ip}}
+ }
+
+ ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
+ defer cancel()
+ ips, err := net.DefaultResolver.LookupIPAddr(ctx, addr)
+ if err != nil {
+ log.Warn("could not look up mailer.SMTP_ADDR: %v", err)
+ return nil
+ }
+ return ips
+}
diff --git a/modules/setting/mailer_test.go b/modules/setting/mailer_test.go
new file mode 100644
index 0000000..f8af4a7
--- /dev/null
+++ b/modules/setting/mailer_test.go
@@ -0,0 +1,54 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_loadMailerFrom(t *testing.T) {
+ kases := map[string]*Mailer{
+ "smtp.mydomain.com": {
+ SMTPAddr: "smtp.mydomain.com",
+ SMTPPort: "465",
+ },
+ "smtp.mydomain.com:123": {
+ SMTPAddr: "smtp.mydomain.com",
+ SMTPPort: "123",
+ },
+ ":123": {
+ SMTPAddr: "127.0.0.1",
+ SMTPPort: "123",
+ },
+ }
+ for host, kase := range kases {
+ t.Run(host, func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData("")
+ sec := cfg.Section("mailer")
+ sec.NewKey("ENABLED", "true")
+ sec.NewKey("HOST", host)
+
+ // Check mailer setting
+ loadMailerFrom(cfg)
+
+ assert.EqualValues(t, kase.SMTPAddr, MailService.SMTPAddr)
+ assert.EqualValues(t, kase.SMTPPort, MailService.SMTPPort)
+ })
+ }
+
+ t.Run("property aliases", func(t *testing.T) {
+ cfg, _ := NewConfigProviderFromData("")
+ sec := cfg.Section("mailer")
+ sec.NewKey("ENABLED", "true")
+ sec.NewKey("USERNAME", "jane.doe@example.com")
+ sec.NewKey("PASSWORD", "y0u'll n3v3r gUess th1S!!1")
+
+ loadMailerFrom(cfg)
+
+ assert.EqualValues(t, "jane.doe@example.com", MailService.User)
+ assert.EqualValues(t, "y0u'll n3v3r gUess th1S!!1", MailService.Passwd)
+ })
+}
diff --git a/modules/setting/markup.go b/modules/setting/markup.go
new file mode 100644
index 0000000..e893c1c
--- /dev/null
+++ b/modules/setting/markup.go
@@ -0,0 +1,192 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "regexp"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// ExternalMarkupRenderers represents the external markup renderers
+var (
+ ExternalMarkupRenderers []*MarkupRenderer
+ ExternalSanitizerRules []MarkupSanitizerRule
+ MermaidMaxSourceCharacters int
+ FilePreviewMaxLines int
+)
+
+const (
+ RenderContentModeSanitized = "sanitized"
+ RenderContentModeNoSanitizer = "no-sanitizer"
+ RenderContentModeIframe = "iframe"
+)
+
+// Markdown settings
+var Markdown = struct {
+ EnableHardLineBreakInComments bool
+ EnableHardLineBreakInDocuments bool
+ CustomURLSchemes []string `ini:"CUSTOM_URL_SCHEMES"`
+ FileExtensions []string
+ EnableMath bool
+}{
+ EnableHardLineBreakInComments: true,
+ EnableHardLineBreakInDocuments: false,
+ FileExtensions: strings.Split(".md,.markdown,.mdown,.mkd,.livemd", ","),
+ EnableMath: true,
+}
+
+// MarkupRenderer defines the external parser configured in ini
+type MarkupRenderer struct {
+ Enabled bool
+ MarkupName string
+ Command string
+ FileExtensions []string
+ IsInputFile bool
+ NeedPostProcess bool
+ MarkupSanitizerRules []MarkupSanitizerRule
+ RenderContentMode string
+}
+
+// MarkupSanitizerRule defines the policy for whitelisting attributes on
+// certain elements.
+type MarkupSanitizerRule struct {
+ Element string
+ AllowAttr string
+ Regexp *regexp.Regexp
+ AllowDataURIImages bool
+}
+
+func loadMarkupFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "markdown", &Markdown)
+
+ MermaidMaxSourceCharacters = rootCfg.Section("markup").Key("MERMAID_MAX_SOURCE_CHARACTERS").MustInt(5000)
+ FilePreviewMaxLines = rootCfg.Section("markup").Key("FILEPREVIEW_MAX_LINES").MustInt(50)
+ ExternalMarkupRenderers = make([]*MarkupRenderer, 0, 10)
+ ExternalSanitizerRules = make([]MarkupSanitizerRule, 0, 10)
+
+ for _, sec := range rootCfg.Section("markup").ChildSections() {
+ name := strings.TrimPrefix(sec.Name(), "markup.")
+ if name == "" {
+			log.Warn("name is empty, markup " + sec.Name() + " ignored")
+ continue
+ }
+
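+		// [markup.sanitizer] and [markup.sanitizer.*] sections define sanitizer rules; any other [markup.*] section defines an external renderer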
+ if name == "sanitizer" || strings.HasPrefix(name, "sanitizer.") {
+ newMarkupSanitizer(name, sec)
+ } else {
+ newMarkupRenderer(name, sec)
+ }
+ }
+}
+
+func newMarkupSanitizer(name string, sec ConfigSection) {
+ rule, ok := createMarkupSanitizerRule(name, sec)
+ if ok {
+ if strings.HasPrefix(name, "sanitizer.") {
+ names := strings.SplitN(strings.TrimPrefix(name, "sanitizer."), ".", 2)
+ name = names[0]
+ }
+ for _, renderer := range ExternalMarkupRenderers {
+ if name == renderer.MarkupName {
+ renderer.MarkupSanitizerRules = append(renderer.MarkupSanitizerRules, rule)
+ return
+ }
+ }
+ ExternalSanitizerRules = append(ExternalSanitizerRules, rule)
+ }
+}
+
+func createMarkupSanitizerRule(name string, sec ConfigSection) (MarkupSanitizerRule, bool) {
+ var rule MarkupSanitizerRule
+
+ ok := false
+ if sec.HasKey("ALLOW_DATA_URI_IMAGES") {
+ rule.AllowDataURIImages = sec.Key("ALLOW_DATA_URI_IMAGES").MustBool(false)
+ ok = true
+ }
+
+ if sec.HasKey("ELEMENT") || sec.HasKey("ALLOW_ATTR") {
+ rule.Element = sec.Key("ELEMENT").Value()
+ rule.AllowAttr = sec.Key("ALLOW_ATTR").Value()
+
+ if rule.Element == "" || rule.AllowAttr == "" {
+ log.Error("Missing required values from markup.%s. Must have ELEMENT and ALLOW_ATTR defined!", name)
+ return rule, false
+ }
+
+ regexpStr := sec.Key("REGEXP").Value()
+ if regexpStr != "" {
+ // Validate when parsing the config that this is a valid regular
+ // expression. Then we can use regexp.MustCompile(...) later.
+ compiled, err := regexp.Compile(regexpStr)
+ if err != nil {
+ log.Error("In markup.%s: REGEXP (%s) failed to compile: %v", name, regexpStr, err)
+ return rule, false
+ }
+
+ rule.Regexp = compiled
+ }
+
+ ok = true
+ }
+
+ if !ok {
+ log.Error("Missing required keys from markup.%s. Must have ELEMENT and ALLOW_ATTR or ALLOW_DATA_URI_IMAGES defined!", name)
+ return rule, false
+ }
+
+ return rule, true
+}
+
+func newMarkupRenderer(name string, sec ConfigSection) {
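+	// a usable file extension must contain a dot followed by a word character (e.g. ".md")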
+ extensionReg := regexp.MustCompile(`\.\w`)
+
+ extensions := sec.Key("FILE_EXTENSIONS").Strings(",")
+ exts := make([]string, 0, len(extensions))
+ for _, extension := range extensions {
+ if !extensionReg.MatchString(extension) {
+ log.Warn(sec.Name() + " file extension " + extension + " is invalid. Extension ignored")
+ } else {
+ exts = append(exts, extension)
+ }
+ }
+
+ if len(exts) == 0 {
+ log.Warn(sec.Name() + " file extension is empty, markup " + name + " ignored")
+ return
+ }
+
+ command := sec.Key("RENDER_COMMAND").MustString("")
+ if command == "" {
+		log.Warn("RENDER_COMMAND is empty, markup " + name + " ignored")
+ return
+ }
+
+ if sec.HasKey("DISABLE_SANITIZER") {
+ log.Error("Deprecated setting `[markup.*]` `DISABLE_SANITIZER` present. This fallback will be removed in v1.18.0")
+ }
+
+ renderContentMode := sec.Key("RENDER_CONTENT_MODE").MustString(RenderContentModeSanitized)
+ if !sec.HasKey("RENDER_CONTENT_MODE") && sec.Key("DISABLE_SANITIZER").MustBool(false) {
+ renderContentMode = RenderContentModeNoSanitizer // if only the legacy DISABLE_SANITIZER exists, use it
+ }
+ if renderContentMode != RenderContentModeSanitized &&
+ renderContentMode != RenderContentModeNoSanitizer &&
+ renderContentMode != RenderContentModeIframe {
+ log.Error("invalid RENDER_CONTENT_MODE: %q, default to %q", renderContentMode, RenderContentModeSanitized)
+ renderContentMode = RenderContentModeSanitized
+ }
+
+ ExternalMarkupRenderers = append(ExternalMarkupRenderers, &MarkupRenderer{
+ Enabled: sec.Key("ENABLED").MustBool(false),
+ MarkupName: name,
+ FileExtensions: exts,
+ Command: command,
+ IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false),
+ NeedPostProcess: sec.Key("NEED_POSTPROCESS").MustBool(true),
+ RenderContentMode: renderContentMode,
+ })
+}
diff --git a/modules/setting/metrics.go b/modules/setting/metrics.go
new file mode 100644
index 0000000..daa0e3b
--- /dev/null
+++ b/modules/setting/metrics.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Metrics settings
+var Metrics = struct {
+ Enabled bool
+ Token string
+ EnabledIssueByLabel bool
+ EnabledIssueByRepository bool
+}{
+ Enabled: false,
+ Token: "",
+ EnabledIssueByLabel: false,
+ EnabledIssueByRepository: false,
+}
+
+func loadMetricsFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "metrics", &Metrics)
+}
diff --git a/modules/setting/migrations.go b/modules/setting/migrations.go
new file mode 100644
index 0000000..5a6079b
--- /dev/null
+++ b/modules/setting/migrations.go
@@ -0,0 +1,28 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Migrations settings
+var Migrations = struct {
+ MaxAttempts int
+ RetryBackoff int
+ AllowedDomains string
+ BlockedDomains string
+ AllowLocalNetworks bool
+ SkipTLSVerify bool
+}{
+ MaxAttempts: 3,
+ RetryBackoff: 3,
+}
+
+func loadMigrationsFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("migrations")
+ Migrations.MaxAttempts = sec.Key("MAX_ATTEMPTS").MustInt(Migrations.MaxAttempts)
+ Migrations.RetryBackoff = sec.Key("RETRY_BACKOFF").MustInt(Migrations.RetryBackoff)
+
+ Migrations.AllowedDomains = sec.Key("ALLOWED_DOMAINS").MustString("")
+ Migrations.BlockedDomains = sec.Key("BLOCKED_DOMAINS").MustString("")
+ Migrations.AllowLocalNetworks = sec.Key("ALLOW_LOCALNETWORKS").MustBool(false)
+ Migrations.SkipTLSVerify = sec.Key("SKIP_TLS_VERIFY").MustBool(false)
+}
diff --git a/modules/setting/mime_type_map.go b/modules/setting/mime_type_map.go
new file mode 100644
index 0000000..55cb2c0
--- /dev/null
+++ b/modules/setting/mime_type_map.go
@@ -0,0 +1,28 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import "strings"
+
+// MimeTypeMap defines custom mime type mapping settings
+var MimeTypeMap = struct {
+ Enabled bool
+ Map map[string]string
+}{
+ Enabled: false,
+ Map: map[string]string{},
+}
+
+func loadMimeTypeMapFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("repository.mimetype_mapping")
+ keys := sec.Keys()
+ m := make(map[string]string, len(keys))
+ for _, key := range keys {
+ m[strings.ToLower(key.Name())] = key.Value()
+ }
+ MimeTypeMap.Map = m
+ if len(keys) > 0 {
+ MimeTypeMap.Enabled = true
+ }
+}
diff --git a/modules/setting/mirror.go b/modules/setting/mirror.go
new file mode 100644
index 0000000..3aa530a
--- /dev/null
+++ b/modules/setting/mirror.go
@@ -0,0 +1,58 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Mirror settings
+var Mirror = struct {
+ Enabled bool
+ DisableNewPull bool
+ DisableNewPush bool
+ DefaultInterval time.Duration
+ MinInterval time.Duration
+}{
+ Enabled: true,
+ DisableNewPull: false,
+ DisableNewPush: false,
+ MinInterval: 10 * time.Minute,
+ DefaultInterval: 8 * time.Hour,
+}
+
+func loadMirrorFrom(rootCfg ConfigProvider) {
+ // Handle old configuration through `[repository]` `DISABLE_MIRRORS`
+ // - please note this was badly named and only disabled the creation of new pull mirrors
+	// DEPRECATED settings should not be removed because users may upgrade from an older version to the latest version;
+	// if they are removed, the deprecation warning will not be shown
+ deprecatedSetting(rootCfg, "repository", "DISABLE_MIRRORS", "mirror", "ENABLED", "v1.19.0")
+ if ConfigSectionKeyBool(rootCfg.Section("repository"), "DISABLE_MIRRORS") {
+ Mirror.DisableNewPull = true
+ }
+
+ if err := rootCfg.Section("mirror").MapTo(&Mirror); err != nil {
+ log.Fatal("Failed to map Mirror settings: %v", err)
+ }
+
+ if !Mirror.Enabled {
+ Mirror.DisableNewPull = true
+ Mirror.DisableNewPush = true
+ }
+
+ if Mirror.MinInterval.Minutes() < 1 {
+		log.Warn("Mirror.MinInterval is too low, setting it to 1 minute")
+ Mirror.MinInterval = 1 * time.Minute
+ }
+ if Mirror.DefaultInterval < Mirror.MinInterval {
+ if time.Hour*8 < Mirror.MinInterval {
+ Mirror.DefaultInterval = Mirror.MinInterval
+ } else {
+ Mirror.DefaultInterval = time.Hour * 8
+ }
+		log.Warn("Mirror.DefaultInterval is less than Mirror.MinInterval, setting it to %s", Mirror.DefaultInterval.String())
+ }
+}
diff --git a/modules/setting/oauth2.go b/modules/setting/oauth2.go
new file mode 100644
index 0000000..49288e2
--- /dev/null
+++ b/modules/setting/oauth2.go
@@ -0,0 +1,174 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "math"
+ "path/filepath"
+ "sync/atomic"
+
+ "code.gitea.io/gitea/modules/generate"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// OAuth2UsernameType is an enum describing how the gitea username should be generated from oauth2 data
+type OAuth2UsernameType string
+
+const (
+	// OAuth2UsernameUserid the oauth2 userid field will be used as the gitea username
+	OAuth2UsernameUserid OAuth2UsernameType = "userid"
+	// OAuth2UsernameNickname the oauth2 nickname field will be used as the gitea username
+	OAuth2UsernameNickname OAuth2UsernameType = "nickname"
+	// OAuth2UsernameEmail the username part of the oauth2 email field will be used as the gitea username
+	OAuth2UsernameEmail OAuth2UsernameType = "email"
+)
+
+func (username OAuth2UsernameType) isValid() bool {
+ switch username {
+ case OAuth2UsernameUserid, OAuth2UsernameNickname, OAuth2UsernameEmail:
+ return true
+ }
+ return false
+}
+
+// OAuth2AccountLinkingType is an enum describing the behaviour when linking to an existing account
+type OAuth2AccountLinkingType string
+
+const (
+	// OAuth2AccountLinkingDisabled an error will be displayed if the account already exists
+	OAuth2AccountLinkingDisabled OAuth2AccountLinkingType = "disabled"
+	// OAuth2AccountLinkingLogin the account linking login will be displayed if the account already exists
+	OAuth2AccountLinkingLogin OAuth2AccountLinkingType = "login"
+	// OAuth2AccountLinkingAuto the account will be automatically linked if it already exists
+	OAuth2AccountLinkingAuto OAuth2AccountLinkingType = "auto"
+)
+
+func (accountLinking OAuth2AccountLinkingType) isValid() bool {
+ switch accountLinking {
+ case OAuth2AccountLinkingDisabled, OAuth2AccountLinkingLogin, OAuth2AccountLinkingAuto:
+ return true
+ }
+ return false
+}
+
+// OAuth2Client settings
+var OAuth2Client struct {
+ RegisterEmailConfirm bool
+ OpenIDConnectScopes []string
+ EnableAutoRegistration bool
+ Username OAuth2UsernameType
+ UpdateAvatar bool
+ AccountLinking OAuth2AccountLinkingType
+}
+
+func loadOAuth2ClientFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("oauth2_client")
+ OAuth2Client.RegisterEmailConfirm = sec.Key("REGISTER_EMAIL_CONFIRM").MustBool(Service.RegisterEmailConfirm)
+ OAuth2Client.OpenIDConnectScopes = parseScopes(sec, "OPENID_CONNECT_SCOPES")
+ OAuth2Client.EnableAutoRegistration = sec.Key("ENABLE_AUTO_REGISTRATION").MustBool()
+ OAuth2Client.Username = OAuth2UsernameType(sec.Key("USERNAME").MustString(string(OAuth2UsernameNickname)))
+ if !OAuth2Client.Username.isValid() {
+ log.Warn("Username setting is not valid: '%s', will fallback to '%s'", OAuth2Client.Username, OAuth2UsernameNickname)
+ OAuth2Client.Username = OAuth2UsernameNickname
+ }
+ OAuth2Client.UpdateAvatar = sec.Key("UPDATE_AVATAR").MustBool()
+ OAuth2Client.AccountLinking = OAuth2AccountLinkingType(sec.Key("ACCOUNT_LINKING").MustString(string(OAuth2AccountLinkingLogin)))
+ if !OAuth2Client.AccountLinking.isValid() {
+ log.Warn("Account linking setting is not valid: '%s', will fallback to '%s'", OAuth2Client.AccountLinking, OAuth2AccountLinkingLogin)
+ OAuth2Client.AccountLinking = OAuth2AccountLinkingLogin
+ }
+}
+
+func parseScopes(sec ConfigSection, name string) []string {
+ parts := sec.Key(name).Strings(" ")
+ scopes := make([]string, 0, len(parts))
+ for _, scope := range parts {
+ if scope != "" {
+ scopes = append(scopes, scope)
+ }
+ }
+ return scopes
+}
+
+var OAuth2 = struct {
+ Enabled bool
+ AccessTokenExpirationTime int64
+ RefreshTokenExpirationTime int64
+ InvalidateRefreshTokens bool
+ JWTSigningAlgorithm string `ini:"JWT_SIGNING_ALGORITHM"`
+ JWTSigningPrivateKeyFile string `ini:"JWT_SIGNING_PRIVATE_KEY_FILE"`
+ MaxTokenLength int
+ DefaultApplications []string
+ EnableAdditionalGrantScopes bool
+}{
+ Enabled: true,
+ AccessTokenExpirationTime: 3600,
+ RefreshTokenExpirationTime: 730,
+ InvalidateRefreshTokens: true,
+ JWTSigningAlgorithm: "RS256",
+ JWTSigningPrivateKeyFile: "jwt/private.pem",
+ MaxTokenLength: math.MaxInt16,
+ DefaultApplications: []string{"git-credential-oauth", "git-credential-manager", "tea"},
+ EnableAdditionalGrantScopes: false,
+}
+
+func loadOAuth2From(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("oauth2")
+ if err := sec.MapTo(&OAuth2); err != nil {
+ log.Fatal("Failed to map OAuth2 settings: %v", err)
+ return
+ }
+
+ // Handle the rename of ENABLE to ENABLED
+ deprecatedSetting(rootCfg, "oauth2", "ENABLE", "oauth2", "ENABLED", "v1.23.0")
+ if sec.HasKey("ENABLE") && !sec.HasKey("ENABLED") {
+ OAuth2.Enabled = sec.Key("ENABLE").MustBool(OAuth2.Enabled)
+ }
+
+ if !filepath.IsAbs(OAuth2.JWTSigningPrivateKeyFile) {
+ OAuth2.JWTSigningPrivateKeyFile = filepath.Join(AppDataPath, OAuth2.JWTSigningPrivateKeyFile)
+ }
+
+	// FIXME: at the moment, an "oauth2 JWT_SECRET" must be generated no matter whether oauth2 is enabled or not,
+	// because this secret is also used as the GeneralTokenSigningSecret (as a quick, not-that-breaking fix for some legacy problems),
+	// including the CSRF token, the account validation token, etc ...
+	// In the main branch, the signing secret should be refactored (e.g. one unique secret each for LFS/OAuth2/etc ...)
+ jwtSecretBase64 := loadSecret(sec, "JWT_SECRET_URI", "JWT_SECRET")
+ if InstallLock {
+ jwtSecretBytes, err := generate.DecodeJwtSecret(jwtSecretBase64)
+ if err != nil {
+ jwtSecretBytes, jwtSecretBase64, err = generate.NewJwtSecret()
+ if err != nil {
+ log.Fatal("error generating JWT secret: %v", err)
+ }
+ saveCfg, err := rootCfg.PrepareSaving()
+ if err != nil {
+ log.Fatal("save oauth2.JWT_SECRET failed: %v", err)
+ }
+ rootCfg.Section("oauth2").Key("JWT_SECRET").SetValue(jwtSecretBase64)
+ saveCfg.Section("oauth2").Key("JWT_SECRET").SetValue(jwtSecretBase64)
+ if err := saveCfg.Save(); err != nil {
+ log.Fatal("save oauth2.JWT_SECRET failed: %v", err)
+ }
+ }
+ generalSigningSecret.Store(&jwtSecretBytes)
+ }
+}
+
+var generalSigningSecret atomic.Pointer[[]byte]
+
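+// GetGeneralTokenSigningSecret returns the general-purpose token signing secret, generating one on first use.
+// The CompareAndSwap ensures that concurrent callers all end up using the same secret.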
+func GetGeneralTokenSigningSecret() []byte {
+ old := generalSigningSecret.Load()
+ if old == nil || len(*old) == 0 {
+ jwtSecret, _, err := generate.NewJwtSecret()
+ if err != nil {
+ log.Fatal("Unable to generate general JWT secret: %v", err)
+ }
+ if generalSigningSecret.CompareAndSwap(old, &jwtSecret) {
+ return jwtSecret
+ }
+ return *generalSigningSecret.Load()
+ }
+ return *old
+}
diff --git a/modules/setting/oauth2_test.go b/modules/setting/oauth2_test.go
new file mode 100644
index 0000000..18252b2
--- /dev/null
+++ b/modules/setting/oauth2_test.go
@@ -0,0 +1,61 @@
+// Copyright 2024 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os"
+ "testing"
+
+ "code.gitea.io/gitea/modules/generate"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestGetGeneralSigningSecret(t *testing.T) {
+ // when there is no general signing secret, it should be generated, and keep the same value
+ generalSigningSecret.Store(nil)
+ s1 := GetGeneralTokenSigningSecret()
+ assert.NotNil(t, s1)
+ s2 := GetGeneralTokenSigningSecret()
+ assert.Equal(t, s1, s2)
+
+ // the config value should always override any pre-generated value
+ cfg, _ := NewConfigProviderFromData(`
+[oauth2]
+JWT_SECRET = BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
+`)
+ defer test.MockVariableValue(&InstallLock, true)()
+ loadOAuth2From(cfg)
+ actual := GetGeneralTokenSigningSecret()
+ expected, _ := generate.DecodeJwtSecret("BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB")
+ assert.Len(t, actual, 32)
+ assert.EqualValues(t, expected, actual)
+}
+
+func TestGetGeneralSigningSecretSave(t *testing.T) {
+ defer test.MockVariableValue(&InstallLock, true)()
+
+ old := GetGeneralTokenSigningSecret()
+ assert.Len(t, old, 32)
+
+ tmpFile := t.TempDir() + "/app.ini"
+ _ = os.WriteFile(tmpFile, nil, 0o644)
+ cfg, _ := NewConfigProviderFromFile(tmpFile)
+ loadOAuth2From(cfg)
+ generated := GetGeneralTokenSigningSecret()
+ assert.Len(t, generated, 32)
+ assert.NotEqual(t, old, generated)
+
+ generalSigningSecret.Store(nil)
+ cfg, _ = NewConfigProviderFromFile(tmpFile)
+ loadOAuth2From(cfg)
+ again := GetGeneralTokenSigningSecret()
+ assert.Equal(t, generated, again)
+
+ iniContent, err := os.ReadFile(tmpFile)
+ require.NoError(t, err)
+ assert.Contains(t, string(iniContent), "JWT_SECRET = ")
+}
diff --git a/modules/setting/other.go b/modules/setting/other.go
new file mode 100644
index 0000000..4ba4947
--- /dev/null
+++ b/modules/setting/other.go
@@ -0,0 +1,29 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import "code.gitea.io/gitea/modules/log"
+
+type OtherConfig struct {
+ ShowFooterVersion bool
+ ShowFooterTemplateLoadTime bool
+ ShowFooterPoweredBy bool
+ EnableFeed bool
+ EnableSitemap bool
+}
+
+var Other = OtherConfig{
+ ShowFooterVersion: true,
+ ShowFooterTemplateLoadTime: true,
+ ShowFooterPoweredBy: true,
+ EnableSitemap: true,
+ EnableFeed: true,
+}
+
+func loadOtherFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("other")
+ if err := sec.MapTo(&Other); err != nil {
+ log.Fatal("Failed to map [other] settings: %v", err)
+ }
+}
diff --git a/modules/setting/packages.go b/modules/setting/packages.go
new file mode 100644
index 0000000..b3f5061
--- /dev/null
+++ b/modules/setting/packages.go
@@ -0,0 +1,124 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "math"
+ "net/url"
+ "os"
+ "path/filepath"
+
+ "github.com/dustin/go-humanize"
+)
+
+// Package registry settings
+var (
+ Packages = struct {
+ Storage *Storage
+ Enabled bool
+ ChunkedUploadPath string
+ RegistryHost string
+
+ LimitTotalOwnerCount int64
+ LimitTotalOwnerSize int64
+ LimitSizeAlpine int64
+ LimitSizeArch int64
+ LimitSizeCargo int64
+ LimitSizeChef int64
+ LimitSizeComposer int64
+ LimitSizeConan int64
+ LimitSizeConda int64
+ LimitSizeContainer int64
+ LimitSizeCran int64
+ LimitSizeDebian int64
+ LimitSizeGeneric int64
+ LimitSizeGo int64
+ LimitSizeHelm int64
+ LimitSizeMaven int64
+ LimitSizeNpm int64
+ LimitSizeNuGet int64
+ LimitSizePub int64
+ LimitSizePyPI int64
+ LimitSizeRpm int64
+ LimitSizeRubyGems int64
+ LimitSizeSwift int64
+ LimitSizeVagrant int64
+ DefaultRPMSignEnabled bool
+ }{
+ Enabled: true,
+ LimitTotalOwnerCount: -1,
+ }
+)
+
+func loadPackagesFrom(rootCfg ConfigProvider) (err error) {
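+	// when no [packages] section exists, only resolve the storage settings (inherited from [storage]) and keep the defaults for everything else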
+ sec, _ := rootCfg.GetSection("packages")
+ if sec == nil {
+ Packages.Storage, err = getStorage(rootCfg, "packages", "", nil)
+ return err
+ }
+
+ if err = sec.MapTo(&Packages); err != nil {
+ return fmt.Errorf("failed to map Packages settings: %v", err)
+ }
+
+ Packages.Storage, err = getStorage(rootCfg, "packages", "", sec)
+ if err != nil {
+ return err
+ }
+
+ appURL, _ := url.Parse(AppURL)
+ Packages.RegistryHost = appURL.Host
+
+ Packages.ChunkedUploadPath = filepath.ToSlash(sec.Key("CHUNKED_UPLOAD_PATH").MustString("tmp/package-upload"))
+ if !filepath.IsAbs(Packages.ChunkedUploadPath) {
+ Packages.ChunkedUploadPath = filepath.ToSlash(filepath.Join(AppDataPath, Packages.ChunkedUploadPath))
+ }
+
+ if HasInstallLock(rootCfg) {
+ if err := os.MkdirAll(Packages.ChunkedUploadPath, os.ModePerm); err != nil {
+ return fmt.Errorf("unable to create chunked upload directory: %s (%v)", Packages.ChunkedUploadPath, err)
+ }
+ }
+
+ Packages.LimitTotalOwnerSize = mustBytes(sec, "LIMIT_TOTAL_OWNER_SIZE")
+ Packages.LimitSizeAlpine = mustBytes(sec, "LIMIT_SIZE_ALPINE")
+ Packages.LimitSizeArch = mustBytes(sec, "LIMIT_SIZE_ARCH")
+ Packages.LimitSizeCargo = mustBytes(sec, "LIMIT_SIZE_CARGO")
+ Packages.LimitSizeChef = mustBytes(sec, "LIMIT_SIZE_CHEF")
+ Packages.LimitSizeComposer = mustBytes(sec, "LIMIT_SIZE_COMPOSER")
+ Packages.LimitSizeConan = mustBytes(sec, "LIMIT_SIZE_CONAN")
+ Packages.LimitSizeConda = mustBytes(sec, "LIMIT_SIZE_CONDA")
+ Packages.LimitSizeContainer = mustBytes(sec, "LIMIT_SIZE_CONTAINER")
+ Packages.LimitSizeCran = mustBytes(sec, "LIMIT_SIZE_CRAN")
+ Packages.LimitSizeDebian = mustBytes(sec, "LIMIT_SIZE_DEBIAN")
+ Packages.LimitSizeGeneric = mustBytes(sec, "LIMIT_SIZE_GENERIC")
+ Packages.LimitSizeGo = mustBytes(sec, "LIMIT_SIZE_GO")
+ Packages.LimitSizeHelm = mustBytes(sec, "LIMIT_SIZE_HELM")
+ Packages.LimitSizeMaven = mustBytes(sec, "LIMIT_SIZE_MAVEN")
+ Packages.LimitSizeNpm = mustBytes(sec, "LIMIT_SIZE_NPM")
+ Packages.LimitSizeNuGet = mustBytes(sec, "LIMIT_SIZE_NUGET")
+ Packages.LimitSizePub = mustBytes(sec, "LIMIT_SIZE_PUB")
+ Packages.LimitSizePyPI = mustBytes(sec, "LIMIT_SIZE_PYPI")
+ Packages.LimitSizeRpm = mustBytes(sec, "LIMIT_SIZE_RPM")
+ Packages.LimitSizeRubyGems = mustBytes(sec, "LIMIT_SIZE_RUBYGEMS")
+ Packages.LimitSizeSwift = mustBytes(sec, "LIMIT_SIZE_SWIFT")
+ Packages.LimitSizeVagrant = mustBytes(sec, "LIMIT_SIZE_VAGRANT")
+ Packages.DefaultRPMSignEnabled = sec.Key("DEFAULT_RPM_SIGN_ENABLED").MustBool(false)
+ return nil
+}
+
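+// mustBytes parses a human-readable size limit (e.g. "1 MiB") from the given key;
+// an empty value, "-1", a parse error or an overflow all mean "no limit" (-1).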
+func mustBytes(section ConfigSection, key string) int64 {
+ const noLimit = "-1"
+
+ value := section.Key(key).MustString(noLimit)
+ if value == noLimit {
+ return -1
+ }
+ bytes, err := humanize.ParseBytes(value)
+ if err != nil || bytes > math.MaxInt64 {
+ return -1
+ }
+ return int64(bytes)
+}
diff --git a/modules/setting/packages_test.go b/modules/setting/packages_test.go
new file mode 100644
index 0000000..78eb4b4
--- /dev/null
+++ b/modules/setting/packages_test.go
@@ -0,0 +1,199 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestMustBytes(t *testing.T) {
+ test := func(value string) int64 {
+ cfg, err := NewConfigProviderFromData("[test]")
+ require.NoError(t, err)
+ sec := cfg.Section("test")
+ sec.NewKey("VALUE", value)
+
+ return mustBytes(sec, "VALUE")
+ }
+
+ assert.EqualValues(t, -1, test(""))
+ assert.EqualValues(t, -1, test("-1"))
+ assert.EqualValues(t, 0, test("0"))
+ assert.EqualValues(t, 1, test("1"))
+ assert.EqualValues(t, 10000, test("10000"))
+ assert.EqualValues(t, 1000000, test("1 mb"))
+ assert.EqualValues(t, 1048576, test("1mib"))
+ assert.EqualValues(t, 1782579, test("1.7mib"))
+ assert.EqualValues(t, -1, test("1 yib")) // too large
+}
+
+func Test_getStorageInheritNameSectionTypeForPackages(t *testing.T) {
+ // packages storage inherits from storage if nothing configured
+ iniStr := `
+[storage]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadPackagesFrom(cfg))
+
+ assert.EqualValues(t, "minio", Packages.Storage.Type)
+ assert.EqualValues(t, "packages/", Packages.Storage.MinioConfig.BasePath)
+
+ // we can also configure packages storage directly
+ iniStr = `
+[storage.packages]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadPackagesFrom(cfg))
+
+ assert.EqualValues(t, "minio", Packages.Storage.Type)
+ assert.EqualValues(t, "packages/", Packages.Storage.MinioConfig.BasePath)
+
+ // or we can indicate the storage type in the packages section
+ iniStr = `
+[packages]
+STORAGE_TYPE = my_minio
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadPackagesFrom(cfg))
+
+ assert.EqualValues(t, "minio", Packages.Storage.Type)
+ assert.EqualValues(t, "packages/", Packages.Storage.MinioConfig.BasePath)
+
+ // or we can indicate the storage type and minio base path in the packages section
+ iniStr = `
+[packages]
+STORAGE_TYPE = my_minio
+MINIO_BASE_PATH = my_packages/
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadPackagesFrom(cfg))
+
+ assert.EqualValues(t, "minio", Packages.Storage.Type)
+ assert.EqualValues(t, "my_packages/", Packages.Storage.MinioConfig.BasePath)
+}
+
+func Test_PackageStorage1(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[packages]
+MINIO_BASE_PATH = packages/
+SERVE_DIRECT = true
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadPackagesFrom(cfg))
+ storage := Packages.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "packages/", storage.MinioConfig.BasePath)
+ assert.True(t, storage.MinioConfig.ServeDirect)
+}
+
+func Test_PackageStorage2(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[storage.packages]
+MINIO_BASE_PATH = packages/
+SERVE_DIRECT = true
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadPackagesFrom(cfg))
+ storage := Packages.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "packages/", storage.MinioConfig.BasePath)
+ assert.True(t, storage.MinioConfig.ServeDirect)
+}
+
+func Test_PackageStorage3(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[packages]
+STORAGE_TYPE = my_cfg
+MINIO_BASE_PATH = my_packages/
+SERVE_DIRECT = true
+[storage.my_cfg]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadPackagesFrom(cfg))
+ storage := Packages.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "my_packages/", storage.MinioConfig.BasePath)
+ assert.True(t, storage.MinioConfig.ServeDirect)
+}
+
+func Test_PackageStorage4(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[storage.packages]
+STORAGE_TYPE = my_cfg
+MINIO_BASE_PATH = my_packages/
+SERVE_DIRECT = true
+[storage.my_cfg]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadPackagesFrom(cfg))
+ storage := Packages.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "my_packages/", storage.MinioConfig.BasePath)
+ assert.True(t, storage.MinioConfig.ServeDirect)
+}
diff --git a/modules/setting/path.go b/modules/setting/path.go
new file mode 100644
index 0000000..85d0e06
--- /dev/null
+++ b/modules/setting/path.go
@@ -0,0 +1,214 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+var (
+ // AppPath represents the path to the gitea binary
+ AppPath string
+
+	// AppWorkPath is the "working directory" of Gitea. It maps to WORK_PATH in app.ini, the "--work-path" flag, or the GITEA_WORK_DIR environment variable.
+	// If none of those are set, it falls back to the default set by the linker at build time, or failing that, to the directory of AppPath.
+	// It is used as the base path for several other paths.
+ AppWorkPath string
+ CustomPath string // Custom directory path. Env: GITEA_CUSTOM
+ CustomConf string
+
+ appWorkPathBuiltin string
+ customPathBuiltin string
+ customConfBuiltin string
+
+ AppWorkPathMismatch bool
+)
+
+func getAppPath() (string, error) {
+ var appPath string
+ var err error
+ if IsWindows && filepath.IsAbs(os.Args[0]) {
+ appPath = filepath.Clean(os.Args[0])
+ } else {
+ appPath, err = exec.LookPath(os.Args[0])
+ }
+ if err != nil {
+ if !errors.Is(err, exec.ErrDot) {
+ return "", err
+ }
+ appPath, err = filepath.Abs(os.Args[0])
+ }
+ if err != nil {
+ return "", err
+ }
+ appPath, err = filepath.Abs(appPath)
+ if err != nil {
+ return "", err
+ }
+	// Note: (legacy code) we don't use path.Dir here because it does not handle the case where the path starts with two "/" on Windows: "//psf/Home/..."
+ return strings.ReplaceAll(appPath, "\\", "/"), err
+}
+
+func init() {
+ var err error
+ if AppPath, err = getAppPath(); err != nil {
+ log.Fatal("Failed to get app path: %v", err)
+ }
+
+ if AppWorkPath == "" {
+ AppWorkPath = filepath.Dir(AppPath)
+ }
+
+ appWorkPathBuiltin = AppWorkPath
+ customPathBuiltin = CustomPath
+ customConfBuiltin = CustomConf
+}
+
+type ArgWorkPathAndCustomConf struct {
+ WorkPath string
+ CustomPath string
+ CustomConf string
+}
+
+type stringWithDefault struct {
+ Value string
+ IsSet bool
+}
+
+func (s *stringWithDefault) Set(v string) {
+ s.Value = v
+ s.IsSet = true
+}
+
+// InitWorkPathAndCommonConfig sets AppWorkPath, CustomPath and CustomConf, initializes the default config provider from CustomConf, and loads the common settings.
+func InitWorkPathAndCommonConfig(getEnvFn func(name string) string, args ArgWorkPathAndCustomConf) {
+ InitWorkPathAndCfgProvider(getEnvFn, args)
+ LoadCommonSettings()
+}
+
+// InitWorkPathAndCfgProvider sets AppWorkPath, CustomPath and CustomConf, and initializes the default config provider from CustomConf.
+func InitWorkPathAndCfgProvider(getEnvFn func(name string) string, args ArgWorkPathAndCustomConf) {
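+	// tryAbsPath resolves the last path element against the preceding ones, right to left, stopping as soon as the result is absolute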
+ tryAbsPath := func(paths ...string) string {
+ s := paths[len(paths)-1]
+ for i := len(paths) - 2; i >= 0; i-- {
+ if filepath.IsAbs(s) {
+ break
+ }
+ s = filepath.Join(paths[i], s)
+ }
+ return s
+ }
+
+ var err error
+ tmpWorkPath := stringWithDefault{Value: appWorkPathBuiltin}
+ if tmpWorkPath.Value == "" {
+ tmpWorkPath.Value = filepath.Dir(AppPath)
+ }
+ tmpCustomPath := stringWithDefault{Value: customPathBuiltin}
+ if tmpCustomPath.Value == "" {
+ tmpCustomPath.Value = "custom"
+ }
+ tmpCustomConf := stringWithDefault{Value: customConfBuiltin}
+ if tmpCustomConf.Value == "" {
+ tmpCustomConf.Value = "conf/app.ini"
+ }
+
+ readFromEnv := func() {
+ envWorkPath := getEnvFn("GITEA_WORK_DIR")
+ if envWorkPath != "" {
+ tmpWorkPath.Set(envWorkPath)
+ if !filepath.IsAbs(tmpWorkPath.Value) {
+ log.Fatal("GITEA_WORK_DIR (work path) must be absolute path")
+ }
+ }
+
+ envWorkPath = getEnvFn("FORGEJO_WORK_DIR")
+ if envWorkPath != "" {
+ tmpWorkPath.Set(envWorkPath)
+ if !filepath.IsAbs(tmpWorkPath.Value) {
+ log.Fatal("FORGEJO_WORK_DIR (work path) must be absolute path")
+ }
+ }
+
+ envCustomPath := getEnvFn("GITEA_CUSTOM")
+ if envCustomPath != "" {
+ tmpCustomPath.Set(envCustomPath)
+ if !filepath.IsAbs(tmpCustomPath.Value) {
+ log.Fatal("GITEA_CUSTOM (custom path) must be absolute path")
+ }
+ }
+
+ envCustomPath = getEnvFn("FORGEJO_CUSTOM")
+ if envCustomPath != "" {
+ tmpCustomPath.Set(envCustomPath)
+ if !filepath.IsAbs(tmpCustomPath.Value) {
+ log.Fatal("FORGEJO_CUSTOM (custom path) must be absolute path")
+ }
+ }
+ }
+
+ readFromArgs := func() {
+ if args.WorkPath != "" {
+ tmpWorkPath.Set(args.WorkPath)
+ if !filepath.IsAbs(tmpWorkPath.Value) {
+ log.Fatal("--work-path must be absolute path")
+ }
+ }
+ if args.CustomPath != "" {
+			tmpCustomPath.Set(args.CustomPath) // if it is not absolute, it will be resolved relative to the work path; this shouldn't happen
+ if !filepath.IsAbs(tmpCustomPath.Value) {
+ log.Error("--custom-path must be absolute path")
+ }
+ }
+ if args.CustomConf != "" {
+ tmpCustomConf.Set(args.CustomConf)
+ if !filepath.IsAbs(tmpCustomConf.Value) {
+ // the config path can be relative to the real current working path
+ if tmpCustomConf.Value, err = filepath.Abs(tmpCustomConf.Value); err != nil {
+ log.Fatal("Failed to get absolute path of config %q: %v", tmpCustomConf.Value, err)
+ }
+ }
+ }
+ }
+
+ readFromEnv()
+ readFromArgs()
+
+ if !tmpCustomConf.IsSet {
+ tmpCustomConf.Set(tryAbsPath(tmpWorkPath.Value, tmpCustomPath.Value, tmpCustomConf.Value))
+ }
+
+ // only read the config but do not load/init anything more, because the AppWorkPath and CustomPath are not ready
+ InitCfgProvider(tmpCustomConf.Value)
+ if HasInstallLock(CfgProvider) {
+ ClearEnvConfigKeys() // if the instance has been installed, do not pass the environment variables to sub-processes
+ }
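+	// a WORK_PATH set in the config file overrides the value from the environment or command line; a difference between the two is recorded in AppWorkPathMismatch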
+ configWorkPath := ConfigSectionKeyString(CfgProvider.Section(""), "WORK_PATH")
+ if configWorkPath != "" {
+ if !filepath.IsAbs(configWorkPath) {
+			log.Fatal("WORK_PATH (%q) in the config file must be an absolute path", configWorkPath)
+ }
+ configWorkPath = filepath.Clean(configWorkPath)
+ if tmpWorkPath.Value != "" && (getEnvFn("GITEA_WORK_DIR") != "" || getEnvFn("FORGEJO_WORK_DIR") != "" || args.WorkPath != "") {
+ fi1, err1 := os.Stat(tmpWorkPath.Value)
+ fi2, err2 := os.Stat(configWorkPath)
+ if err1 != nil || err2 != nil || !os.SameFile(fi1, fi2) {
+ AppWorkPathMismatch = true
+ }
+ }
+ tmpWorkPath.Set(configWorkPath)
+ }
+
+ tmpCustomPath.Set(tryAbsPath(tmpWorkPath.Value, tmpCustomPath.Value))
+
+ AppWorkPath = tmpWorkPath.Value
+ CustomPath = tmpCustomPath.Value
+ CustomConf = tmpCustomConf.Value
+}
diff --git a/modules/setting/path_test.go b/modules/setting/path_test.go
new file mode 100644
index 0000000..4508bae
--- /dev/null
+++ b/modules/setting/path_test.go
@@ -0,0 +1,243 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+type envVars map[string]string
+
+func (e envVars) Getenv(key string) string {
+ return e[key]
+}
+
+func TestInitWorkPathAndCommonConfig(t *testing.T) {
+ testInit := func(defaultWorkPath, defaultCustomPath, defaultCustomConf string) {
+ AppWorkPathMismatch = false
+ AppWorkPath = defaultWorkPath
+ appWorkPathBuiltin = defaultWorkPath
+ CustomPath = defaultCustomPath
+ customPathBuiltin = defaultCustomPath
+ CustomConf = defaultCustomConf
+ customConfBuiltin = defaultCustomConf
+ }
+
+ fp := filepath.Join
+
+ tmpDir := t.TempDir()
+ dirFoo := fp(tmpDir, "foo")
+ dirBar := fp(tmpDir, "bar")
+ dirXxx := fp(tmpDir, "xxx")
+ dirYyy := fp(tmpDir, "yyy")
+
+ t.Run("Default", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirFoo, "custom"), CustomPath)
+ assert.Equal(t, fp(dirFoo, "custom/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("WorkDir(env)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirBar, AppWorkPath)
+ assert.Equal(t, fp(dirBar, "custom"), CustomPath)
+ assert.Equal(t, fp(dirBar, "custom/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("WorkDir(env,arg)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{WorkPath: dirXxx})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, fp(dirXxx, "custom/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("WorkDir(env)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirBar, AppWorkPath)
+ assert.Equal(t, fp(dirBar, "custom"), CustomPath)
+ assert.Equal(t, fp(dirBar, "custom/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("WorkDir(env,arg)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{WorkPath: dirXxx})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, fp(dirXxx, "custom/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("CustomPath(env)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_CUSTOM": fp(dirBar, "custom1")}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirBar, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirBar, "custom1/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("CustomPath(env,arg)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_CUSTOM": fp(dirBar, "custom1")}.Getenv, ArgWorkPathAndCustomConf{CustomPath: "custom2"})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirFoo, "custom2"), CustomPath)
+ assert.Equal(t, fp(dirFoo, "custom2/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("CustomPath(env)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_CUSTOM": fp(dirBar, "custom1")}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirBar, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirBar, "custom1/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("CustomPath(env,arg)", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_CUSTOM": fp(dirBar, "custom1")}.Getenv, ArgWorkPathAndCustomConf{CustomPath: "custom2"})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirFoo, "custom2"), CustomPath)
+ assert.Equal(t, fp(dirFoo, "custom2/conf/app.ini"), CustomConf)
+ })
+
+ t.Run("CustomConf", func(t *testing.T) {
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: "app1.ini"})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ cwd, _ := os.Getwd()
+ assert.Equal(t, fp(cwd, "app1.ini"), CustomConf)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: fp(dirBar, "app1.ini")})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirBar, "app1.ini"), CustomConf)
+ })
+
+ t.Run("CustomConfOverrideWorkPath", func(t *testing.T) {
+ iniWorkPath := fp(tmpDir, "app-workpath.ini")
+ _ = os.WriteFile(iniWorkPath, []byte("WORK_PATH="+dirXxx), 0o644)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.False(t, AppWorkPathMismatch)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.True(t, AppWorkPathMismatch)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{WorkPath: dirBar, CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.True(t, AppWorkPathMismatch)
+ })
+
+ t.Run("CustomConfOverrideWorkPath", func(t *testing.T) {
+ iniWorkPath := fp(tmpDir, "app-workpath.ini")
+ _ = os.WriteFile(iniWorkPath, []byte("WORK_PATH="+dirXxx), 0o644)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.False(t, AppWorkPathMismatch)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_WORK_DIR": dirBar}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.True(t, AppWorkPathMismatch)
+
+ testInit(dirFoo, "", "")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{WorkPath: dirBar, CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ assert.True(t, AppWorkPathMismatch)
+ })
+
+ t.Run("Builtin", func(t *testing.T) {
+ testInit(dirFoo, dirBar, dirXxx)
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, dirBar, CustomPath)
+ assert.Equal(t, dirXxx, CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirFoo, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirFoo, "custom1/cfg.ini"), CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_WORK_DIR": dirYyy}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirYyy, AppWorkPath)
+ assert.Equal(t, fp(dirYyy, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirYyy, "custom1/cfg.ini"), CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{"GITEA_CUSTOM": dirYyy}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, dirYyy, CustomPath)
+ assert.Equal(t, fp(dirYyy, "cfg.ini"), CustomConf)
+
+ iniWorkPath := fp(tmpDir, "app-workpath.ini")
+ _ = os.WriteFile(iniWorkPath, []byte("WORK_PATH="+dirXxx), 0o644)
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom1"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ })
+
+ t.Run("Builtin", func(t *testing.T) {
+ testInit(dirFoo, dirBar, dirXxx)
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, dirBar, CustomPath)
+ assert.Equal(t, dirXxx, CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, fp(dirFoo, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirFoo, "custom1/cfg.ini"), CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_WORK_DIR": dirYyy}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirYyy, AppWorkPath)
+ assert.Equal(t, fp(dirYyy, "custom1"), CustomPath)
+ assert.Equal(t, fp(dirYyy, "custom1/cfg.ini"), CustomConf)
+
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{"FORGEJO_CUSTOM": dirYyy}.Getenv, ArgWorkPathAndCustomConf{})
+ assert.Equal(t, dirFoo, AppWorkPath)
+ assert.Equal(t, dirYyy, CustomPath)
+ assert.Equal(t, fp(dirYyy, "cfg.ini"), CustomConf)
+
+ iniWorkPath := fp(tmpDir, "app-workpath.ini")
+ _ = os.WriteFile(iniWorkPath, []byte("WORK_PATH="+dirXxx), 0o644)
+ testInit(dirFoo, "custom1", "cfg.ini")
+ InitWorkPathAndCommonConfig(envVars{}.Getenv, ArgWorkPathAndCustomConf{CustomConf: iniWorkPath})
+ assert.Equal(t, dirXxx, AppWorkPath)
+ assert.Equal(t, fp(dirXxx, "custom1"), CustomPath)
+ assert.Equal(t, iniWorkPath, CustomConf)
+ })
+}
diff --git a/modules/setting/picture.go b/modules/setting/picture.go
new file mode 100644
index 0000000..fafae45
--- /dev/null
+++ b/modules/setting/picture.go
@@ -0,0 +1,109 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Avatar settings
+
+var (
+ Avatar = struct {
+ Storage *Storage
+
+ MaxWidth int
+ MaxHeight int
+ MaxFileSize int64
+ MaxOriginSize int64
+ RenderedSizeFactor int
+ }{
+ MaxWidth: 4096,
+ MaxHeight: 4096,
+ MaxFileSize: 1048576,
+ MaxOriginSize: 262144,
+ RenderedSizeFactor: 2,
+ }
+
+ GravatarSource string
+ DisableGravatar bool // Deprecated: migrated to database
+ EnableFederatedAvatar bool // Deprecated: migrated to database
+
+ RepoAvatar = struct {
+ Storage *Storage
+
+ Fallback string
+ FallbackImage string
+ }{}
+)
+
+func loadAvatarsFrom(rootCfg ConfigProvider) error {
+ sec := rootCfg.Section("picture")
+
+ avatarSec := rootCfg.Section("avatar")
+ storageType := sec.Key("AVATAR_STORAGE_TYPE").MustString("")
+ // Specifically default PATH to AVATAR_UPLOAD_PATH
+ avatarSec.Key("PATH").MustString(sec.Key("AVATAR_UPLOAD_PATH").String())
+
+ var err error
+ Avatar.Storage, err = getStorage(rootCfg, "avatars", storageType, avatarSec)
+ if err != nil {
+ return err
+ }
+
+ Avatar.MaxWidth = sec.Key("AVATAR_MAX_WIDTH").MustInt(4096)
+ Avatar.MaxHeight = sec.Key("AVATAR_MAX_HEIGHT").MustInt(4096)
+ Avatar.MaxFileSize = sec.Key("AVATAR_MAX_FILE_SIZE").MustInt64(1048576)
+ Avatar.MaxOriginSize = sec.Key("AVATAR_MAX_ORIGIN_SIZE").MustInt64(262144)
+ Avatar.RenderedSizeFactor = sec.Key("AVATAR_RENDERED_SIZE_FACTOR").MustInt(2)
+
+ switch source := sec.Key("GRAVATAR_SOURCE").MustString("gravatar"); source {
+ case "duoshuo":
+ GravatarSource = "http://gravatar.duoshuo.com/avatar/"
+ case "gravatar":
+ GravatarSource = "https://secure.gravatar.com/avatar/"
+ case "libravatar":
+ GravatarSource = "https://seccdn.libravatar.org/avatar/"
+ default:
+ GravatarSource = source
+ }
+
+ DisableGravatar = sec.Key("DISABLE_GRAVATAR").MustBool(GetDefaultDisableGravatar())
+ deprecatedSettingDB(rootCfg, "", "DISABLE_GRAVATAR")
+ EnableFederatedAvatar = sec.Key("ENABLE_FEDERATED_AVATAR").MustBool(GetDefaultEnableFederatedAvatar(DisableGravatar))
+ deprecatedSettingDB(rootCfg, "", "ENABLE_FEDERATED_AVATAR")
+
+ return nil
+}
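+
+// Illustrative app.ini snippet for the lookups above (the values are examples
+// only, not recommendations):
+//
+//	[picture]
+//	AVATAR_MAX_WIDTH = 2048
+//	AVATAR_MAX_FILE_SIZE = 524288
+//	GRAVATAR_SOURCE = libravatar
+//	DISABLE_GRAVATAR = false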
+
+func GetDefaultDisableGravatar() bool {
+ return OfflineMode
+}
+
+func GetDefaultEnableFederatedAvatar(disableGravatar bool) bool {
+ v := !InstallLock
+ if OfflineMode {
+ v = false
+ }
+ if disableGravatar {
+ v = false
+ }
+ return v
+}
+
+func loadRepoAvatarFrom(rootCfg ConfigProvider) error {
+ sec := rootCfg.Section("picture")
+
+ repoAvatarSec := rootCfg.Section("repo-avatar")
+ storageType := sec.Key("REPOSITORY_AVATAR_STORAGE_TYPE").MustString("")
+ // Specifically default PATH to REPOSITORY_AVATAR_UPLOAD_PATH
+ repoAvatarSec.Key("PATH").MustString(sec.Key("REPOSITORY_AVATAR_UPLOAD_PATH").String())
+
+ var err error
+ RepoAvatar.Storage, err = getStorage(rootCfg, "repo-avatars", storageType, repoAvatarSec)
+ if err != nil {
+ return err
+ }
+
+ RepoAvatar.Fallback = sec.Key("REPOSITORY_AVATAR_FALLBACK").MustString("none")
+ RepoAvatar.FallbackImage = sec.Key("REPOSITORY_AVATAR_FALLBACK_IMAGE").MustString(AppSubURL + "/assets/img/repo_default.png")
+
+ return nil
+}
diff --git a/modules/setting/project.go b/modules/setting/project.go
new file mode 100644
index 0000000..803e933
--- /dev/null
+++ b/modules/setting/project.go
@@ -0,0 +1,19 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Project settings
+var (
+ Project = struct {
+ ProjectBoardBasicKanbanType []string
+ ProjectBoardBugTriageType []string
+ }{
+ ProjectBoardBasicKanbanType: []string{"To Do", "In Progress", "Done"},
+ ProjectBoardBugTriageType: []string{"Needs Triage", "High Priority", "Low Priority", "Closed"},
+ }
+)
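+
+// Illustrative [project] override for the defaults above; the column names
+// are examples only:
+//
+//	[project]
+//	PROJECT_BOARD_BASIC_KANBAN_TYPE = Backlog, In Progress, Done
+//	PROJECT_BOARD_BUG_TRIAGE_TYPE = Needs Triage, Confirmed, Closed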
+
+func loadProjectFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "project", &Project)
+}
diff --git a/modules/setting/proxy.go b/modules/setting/proxy.go
new file mode 100644
index 0000000..4ff420d
--- /dev/null
+++ b/modules/setting/proxy.go
@@ -0,0 +1,37 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/url"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Proxy settings
+var Proxy = struct {
+ Enabled bool
+ ProxyURL string
+ ProxyURLFixed *url.URL
+ ProxyHosts []string
+}{
+ Enabled: false,
+ ProxyURL: "",
+ ProxyHosts: []string{},
+}
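+
+// Illustrative [proxy] configuration read by loadProxyFrom below (the URL and
+// host patterns are examples only):
+//
+//	[proxy]
+//	PROXY_ENABLED = true
+//	PROXY_URL = http://proxy.example.com:8080
+//	PROXY_HOSTS = *.github.com, gitlab.example.org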
+
+func loadProxyFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("proxy")
+ Proxy.Enabled = sec.Key("PROXY_ENABLED").MustBool(false)
+ Proxy.ProxyURL = sec.Key("PROXY_URL").MustString("")
+ if Proxy.ProxyURL != "" {
+ var err error
+ Proxy.ProxyURLFixed, err = url.Parse(Proxy.ProxyURL)
+ if err != nil {
+ log.Error("Global PROXY_URL is not valid")
+ Proxy.ProxyURL = ""
+ }
+ }
+ Proxy.ProxyHosts = sec.Key("PROXY_HOSTS").Strings(",")
+}
diff --git a/modules/setting/queue.go b/modules/setting/queue.go
new file mode 100644
index 0000000..251a6c1
--- /dev/null
+++ b/modules/setting/queue.go
@@ -0,0 +1,120 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "runtime"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// QueueSettings represent the settings for a queue from the ini
+type QueueSettings struct {
+ Name string // not an INI option, it is the name for [queue.the-name] section
+
+ Type string
+ Datadir string
+ ConnStr string // for leveldb or redis
+ Length int // max queue length before blocking
+
+ QueueName, SetName string // the name suffix for storage (db key, redis key), "set" is for unique queue
+
+ BatchLength int
+ MaxWorkers int
+}
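+
+// Illustrative configuration read by GetQueueSettings below: the shared
+// [queue] section applies to every queue and [queue.<name>] overrides it
+// (the values are examples only):
+//
+//	[queue]
+//	TYPE = level
+//	DATADIR = queues/common
+//
+//	[queue.issue_indexer]
+//	TYPE = redis
+//	CONN_STR = redis://127.0.0.1:6379/0
+//	MAX_WORKERS = 4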
+
+func GetQueueSettings(rootCfg ConfigProvider, name string) (QueueSettings, error) {
+ queueSettingsDefault := QueueSettings{
+ Type: "level", // dummy, channel, level, redis
+ Datadir: "queues/common", // relative to AppDataPath
+ Length: 100000, // queue length before a channel queue will block
+
+ QueueName: "_queue",
+ SetName: "_unique",
+ BatchLength: 20,
+ MaxWorkers: runtime.NumCPU() / 2,
+ }
+ if queueSettingsDefault.MaxWorkers < 1 {
+ queueSettingsDefault.MaxWorkers = 1
+ }
+ if queueSettingsDefault.MaxWorkers > 10 {
+ queueSettingsDefault.MaxWorkers = 10
+ }
+
+ // deep copy default settings
+ cfg := QueueSettings{}
+ if cfgBs, err := json.Marshal(queueSettingsDefault); err != nil {
+ return cfg, err
+ } else if err = json.Unmarshal(cfgBs, &cfg); err != nil {
+ return cfg, err
+ }
+
+ cfg.Name = name
+ if sec, err := rootCfg.GetSection("queue"); err == nil {
+ if err = sec.MapTo(&cfg); err != nil {
+ log.Error("Failed to map queue common config for %q: %v", name, err)
+ return cfg, nil
+ }
+ }
+ if sec, err := rootCfg.GetSection("queue." + name); err == nil {
+ if err = sec.MapTo(&cfg); err != nil {
+ log.Error("Failed to map queue spec config for %q: %v", name, err)
+ return cfg, nil
+ }
+ if sec.HasKey("CONN_STR") {
+ cfg.ConnStr = sec.Key("CONN_STR").String()
+ }
+ }
+
+ if cfg.Datadir == "" {
+ cfg.Datadir = queueSettingsDefault.Datadir
+ }
+ if !filepath.IsAbs(cfg.Datadir) {
+ cfg.Datadir = filepath.Join(AppDataPath, cfg.Datadir)
+ }
+ cfg.Datadir = filepath.ToSlash(cfg.Datadir)
+
+ if cfg.Type == "redis" && cfg.ConnStr == "" {
+ cfg.ConnStr = "redis://127.0.0.1:6379/0"
+ }
+
+ if cfg.Length <= 0 {
+ cfg.Length = queueSettingsDefault.Length
+ }
+ if cfg.MaxWorkers <= 0 {
+ cfg.MaxWorkers = queueSettingsDefault.MaxWorkers
+ }
+ if cfg.BatchLength <= 0 {
+ cfg.BatchLength = queueSettingsDefault.BatchLength
+ }
+
+ return cfg, nil
+}
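+
+// Minimal usage sketch (assumes the config provider has been initialized; the
+// queue name "mailer" is only an example):
+//
+//	cfg, err := GetQueueSettings(CfgProvider, "mailer")
+//	if err != nil {
+//		log.Fatal("Invalid queue settings: %v", err)
+//	}
+//	log.Info("queue %s uses type %s with %d workers", cfg.Name, cfg.Type, cfg.MaxWorkers)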
+
+func LoadQueueSettings() {
+ loadQueueFrom(CfgProvider)
+}
+
+func loadQueueFrom(rootCfg ConfigProvider) {
+ hasOld := false
+ handleOldLengthConfiguration := func(rootCfg ConfigProvider, newQueueName, oldSection, oldKey string) {
+ if rootCfg.Section(oldSection).HasKey(oldKey) {
+ hasOld = true
+ log.Error("Removed queue option: `[%s].%s`. Use new options in `[queue.%s]`", oldSection, oldKey, newQueueName)
+ }
+ }
+ handleOldLengthConfiguration(rootCfg, "issue_indexer", "indexer", "ISSUE_INDEXER_QUEUE_TYPE")
+ handleOldLengthConfiguration(rootCfg, "issue_indexer", "indexer", "ISSUE_INDEXER_QUEUE_BATCH_NUMBER")
+ handleOldLengthConfiguration(rootCfg, "issue_indexer", "indexer", "ISSUE_INDEXER_QUEUE_DIR")
+ handleOldLengthConfiguration(rootCfg, "issue_indexer", "indexer", "ISSUE_INDEXER_QUEUE_CONN_STR")
+ handleOldLengthConfiguration(rootCfg, "issue_indexer", "indexer", "UPDATE_BUFFER_LEN")
+ handleOldLengthConfiguration(rootCfg, "mailer", "mailer", "SEND_BUFFER_LEN")
+ handleOldLengthConfiguration(rootCfg, "pr_patch_checker", "repository", "PULL_REQUEST_QUEUE_LENGTH")
+ handleOldLengthConfiguration(rootCfg, "mirror", "repository", "MIRROR_QUEUE_LENGTH")
+ if hasOld {
+ log.Fatal("Please update your app.ini to remove deprecated config options")
+ }
+}
diff --git a/modules/setting/quota.go b/modules/setting/quota.go
new file mode 100644
index 0000000..05e14ba
--- /dev/null
+++ b/modules/setting/quota.go
@@ -0,0 +1,26 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// Quota settings
+var Quota = struct {
+ Enabled bool `ini:"ENABLED"`
+ DefaultGroups []string `ini:"DEFAULT_GROUPS"`
+
+ Default struct {
+ Total int64
+ } `ini:"quota.default"`
+}{
+ Enabled: false,
+ DefaultGroups: []string{},
+ Default: struct {
+ Total int64
+ }{
+ Total: -1,
+ },
+}
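+
+// Illustrative [quota] configuration matching the mapping above (the group
+// name and size are examples only):
+//
+//	[quota]
+//	ENABLED = true
+//	DEFAULT_GROUPS = default
+//
+//	[quota.default]
+//	TOTAL = 1073741824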
+
+func loadQuotaFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "quota", &Quota)
+}
diff --git a/modules/setting/repository.go b/modules/setting/repository.go
new file mode 100644
index 0000000..6086dd1
--- /dev/null
+++ b/modules/setting/repository.go
@@ -0,0 +1,376 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os/exec"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// enumerates all the repository creation policies
+const (
+ RepoCreatingLastUserVisibility = "last"
+ RepoCreatingPrivate = "private"
+ RepoCreatingPublic = "public"
+)
+
+// MaxUserCardsPerPage sets the maximum number of watchers and stargazers shown per page;
+// those pages use a 2- or 3-column layout, so the value should be divisible by both 2 and 3
+var MaxUserCardsPerPage = 36
+
+// MaxForksPerPage sets the maximum number of forks shown per page
+var MaxForksPerPage = 40
+
+// Repository settings
+var (
+ Repository = struct {
+ DetectedCharsetsOrder []string
+ DetectedCharsetScore map[string]int `ini:"-"`
+ AnsiCharset string
+ ForcePrivate bool
+ DefaultPrivate string
+ DefaultPushCreatePrivate bool
+ MaxCreationLimit int
+ PreferredLicenses []string
+ DisableHTTPGit bool
+ AccessControlAllowOrigin string
+ UseCompatSSHURI bool
+ GoGetCloneURLProtocol string
+ DefaultCloseIssuesViaCommitsInAnyBranch bool
+ EnablePushCreateUser bool
+ EnablePushCreateOrg bool
+ DisabledRepoUnits []string
+ DefaultRepoUnits []string
+ DefaultForkRepoUnits []string
+ PrefixArchiveFiles bool
+ DisableMigrations bool
+ DisableStars bool
+ DisableForks bool
+ DefaultBranch string
+ AllowAdoptionOfUnadoptedRepositories bool
+ AllowDeleteOfUnadoptedRepositories bool
+ DisableDownloadSourceArchives bool
+ AllowForkWithoutMaximumLimit bool
+
+ // Repository editor settings
+ Editor struct {
+ LineWrapExtensions []string
+ } `ini:"-"`
+
+ // Repository upload settings
+ Upload struct {
+ Enabled bool
+ TempPath string
+ AllowedTypes string
+ FileMaxSize int64
+ MaxFiles int
+ } `ini:"-"`
+
+ // Repository local settings
+ Local struct {
+ LocalCopyPath string
+ } `ini:"-"`
+
+ // Pull request settings
+ PullRequest struct {
+ WorkInProgressPrefixes []string
+ CloseKeywords []string
+ ReopenKeywords []string
+ DefaultMergeStyle string
+ DefaultMergeMessageCommitsLimit int
+ DefaultMergeMessageSize int
+ DefaultMergeMessageAllAuthors bool
+ DefaultMergeMessageMaxApprovers int
+ DefaultMergeMessageOfficialApproversOnly bool
+ PopulateSquashCommentWithCommitMessages bool
+ AddCoCommitterTrailers bool
+ TestConflictingPatchesWithGitApply bool
+ RetargetChildrenOnMerge bool
+ } `ini:"repository.pull-request"`
+
+ // Issue settings
+ Issue struct {
+ LockReasons []string
+ MaxPinned int
+ } `ini:"repository.issue"`
+
+ Release struct {
+ AllowedTypes string
+ DefaultPagingNum int
+ } `ini:"repository.release"`
+
+ Signing struct {
+ SigningKey string
+ SigningName string
+ SigningEmail string
+ InitialCommit []string
+ CRUDActions []string `ini:"CRUD_ACTIONS"`
+ Merges []string
+ Wiki []string
+ DefaultTrustModel string
+ } `ini:"repository.signing"`
+
+ SettableFlags []string
+ EnableFlags bool
+ }{
+ DetectedCharsetsOrder: []string{
+ "UTF-8",
+ "UTF-16BE",
+ "UTF-16LE",
+ "UTF-32BE",
+ "UTF-32LE",
+ "ISO-8859-1",
+ "windows-1252",
+ "ISO-8859-2",
+ "windows-1250",
+ "ISO-8859-5",
+ "ISO-8859-6",
+ "ISO-8859-7",
+ "windows-1253",
+ "ISO-8859-8-I",
+ "windows-1255",
+ "ISO-8859-8",
+ "windows-1251",
+ "windows-1256",
+ "KOI8-R",
+ "ISO-8859-9",
+ "windows-1254",
+ "Shift_JIS",
+ "GB18030",
+ "EUC-JP",
+ "EUC-KR",
+ "Big5",
+ "ISO-2022-JP",
+ "ISO-2022-KR",
+ "ISO-2022-CN",
+ "IBM424_rtl",
+ "IBM424_ltr",
+ "IBM420_rtl",
+ "IBM420_ltr",
+ },
+ DetectedCharsetScore: map[string]int{},
+ AnsiCharset: "",
+ ForcePrivate: false,
+ DefaultPrivate: RepoCreatingLastUserVisibility,
+ DefaultPushCreatePrivate: true,
+ MaxCreationLimit: -1,
+ PreferredLicenses: []string{"Apache-2.0", "MIT"},
+ DisableHTTPGit: false,
+ AccessControlAllowOrigin: "",
+ UseCompatSSHURI: true,
+ DefaultCloseIssuesViaCommitsInAnyBranch: false,
+ EnablePushCreateUser: false,
+ EnablePushCreateOrg: false,
+ DisabledRepoUnits: []string{},
+ DefaultRepoUnits: []string{},
+ DefaultForkRepoUnits: []string{},
+ PrefixArchiveFiles: true,
+ DisableMigrations: false,
+ DisableStars: false,
+ DisableForks: false,
+ DefaultBranch: "main",
+ AllowForkWithoutMaximumLimit: true,
+
+ // Repository editor settings
+ Editor: struct {
+ LineWrapExtensions []string
+ }{
+ LineWrapExtensions: strings.Split(".txt,.md,.markdown,.mdown,.mkd,.livemd,", ","),
+ },
+
+ // Repository upload settings
+ Upload: struct {
+ Enabled bool
+ TempPath string
+ AllowedTypes string
+ FileMaxSize int64
+ MaxFiles int
+ }{
+ Enabled: true,
+ TempPath: "data/tmp/uploads",
+ AllowedTypes: "",
+ FileMaxSize: 50,
+ MaxFiles: 5,
+ },
+
+ // Repository local settings
+ Local: struct {
+ LocalCopyPath string
+ }{
+ LocalCopyPath: "tmp/local-repo",
+ },
+
+ // Pull request settings
+ PullRequest: struct {
+ WorkInProgressPrefixes []string
+ CloseKeywords []string
+ ReopenKeywords []string
+ DefaultMergeStyle string
+ DefaultMergeMessageCommitsLimit int
+ DefaultMergeMessageSize int
+ DefaultMergeMessageAllAuthors bool
+ DefaultMergeMessageMaxApprovers int
+ DefaultMergeMessageOfficialApproversOnly bool
+ PopulateSquashCommentWithCommitMessages bool
+ AddCoCommitterTrailers bool
+ TestConflictingPatchesWithGitApply bool
+ RetargetChildrenOnMerge bool
+ }{
+ WorkInProgressPrefixes: []string{"WIP:", "[WIP]"},
+ // Same as GitHub. See
+ // https://help.github.com/articles/closing-issues-via-commit-messages
+ CloseKeywords: strings.Split("close,closes,closed,fix,fixes,fixed,resolve,resolves,resolved", ","),
+ ReopenKeywords: strings.Split("reopen,reopens,reopened", ","),
+ DefaultMergeStyle: "merge",
+ DefaultMergeMessageCommitsLimit: 50,
+ DefaultMergeMessageSize: 5 * 1024,
+ DefaultMergeMessageAllAuthors: false,
+ DefaultMergeMessageMaxApprovers: 10,
+ DefaultMergeMessageOfficialApproversOnly: true,
+ PopulateSquashCommentWithCommitMessages: false,
+ AddCoCommitterTrailers: true,
+ RetargetChildrenOnMerge: true,
+ },
+
+ // Issue settings
+ Issue: struct {
+ LockReasons []string
+ MaxPinned int
+ }{
+ LockReasons: strings.Split("Too heated,Off-topic,Spam,Resolved", ","),
+ MaxPinned: 3,
+ },
+
+ Release: struct {
+ AllowedTypes string
+ DefaultPagingNum int
+ }{
+ AllowedTypes: "",
+ DefaultPagingNum: 10,
+ },
+
+ // Signing settings
+ Signing: struct {
+ SigningKey string
+ SigningName string
+ SigningEmail string
+ InitialCommit []string
+ CRUDActions []string `ini:"CRUD_ACTIONS"`
+ Merges []string
+ Wiki []string
+ DefaultTrustModel string
+ }{
+ SigningKey: "default",
+ SigningName: "",
+ SigningEmail: "",
+ InitialCommit: []string{"always"},
+ CRUDActions: []string{"pubkey", "twofa", "parentsigned"},
+ Merges: []string{"pubkey", "twofa", "basesigned", "commitssigned"},
+ Wiki: []string{"never"},
+ DefaultTrustModel: "collaborator",
+ },
+
+ EnableFlags: false,
+ }
+ RepoRootPath string
+ ScriptType = "bash"
+)
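+
+// Illustrative [repository] overrides for a few of the defaults above (the
+// values are examples only):
+//
+//	[repository]
+//	ROOT = /srv/forgejo/repositories
+//	DEFAULT_BRANCH = trunk
+//	PREFERRED_LICENSES = MIT
+//
+//	[repository.pull-request]
+//	DEFAULT_MERGE_STYLE = squash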
+
+func loadRepositoryFrom(rootCfg ConfigProvider) {
+ var err error
+ // Determine and create root git repository path.
+ sec := rootCfg.Section("repository")
+ Repository.DisableHTTPGit = sec.Key("DISABLE_HTTP_GIT").MustBool()
+ Repository.UseCompatSSHURI = sec.Key("USE_COMPAT_SSH_URI").MustBool()
+ Repository.GoGetCloneURLProtocol = sec.Key("GO_GET_CLONE_URL_PROTOCOL").MustString("https")
+ Repository.MaxCreationLimit = sec.Key("MAX_CREATION_LIMIT").MustInt(-1)
+ Repository.DefaultBranch = sec.Key("DEFAULT_BRANCH").MustString(Repository.DefaultBranch)
+ RepoRootPath = sec.Key("ROOT").MustString(path.Join(AppDataPath, "forgejo-repositories"))
+ if !filepath.IsAbs(RepoRootPath) {
+ RepoRootPath = filepath.Join(AppWorkPath, RepoRootPath)
+ } else {
+ RepoRootPath = filepath.Clean(RepoRootPath)
+ }
+ defaultDetectedCharsetsOrder := make([]string, 0, len(Repository.DetectedCharsetsOrder))
+ for _, charset := range Repository.DetectedCharsetsOrder {
+ defaultDetectedCharsetsOrder = append(defaultDetectedCharsetsOrder, strings.ToLower(strings.TrimSpace(charset)))
+ }
+ ScriptType = sec.Key("SCRIPT_TYPE").MustString("bash")
+
+ if _, err := exec.LookPath(ScriptType); err != nil {
+ log.Warn("SCRIPT_TYPE %q is not on the current PATH. Are you sure that this is the correct SCRIPT_TYPE?", ScriptType)
+ }
+
+ if err = sec.MapTo(&Repository); err != nil {
+ log.Fatal("Failed to map Repository settings: %v", err)
+ } else if err = rootCfg.Section("repository.editor").MapTo(&Repository.Editor); err != nil {
+ log.Fatal("Failed to map Repository.Editor settings: %v", err)
+ } else if err = rootCfg.Section("repository.upload").MapTo(&Repository.Upload); err != nil {
+ log.Fatal("Failed to map Repository.Upload settings: %v", err)
+ } else if err = rootCfg.Section("repository.local").MapTo(&Repository.Local); err != nil {
+ log.Fatal("Failed to map Repository.Local settings: %v", err)
+ } else if err = rootCfg.Section("repository.pull-request").MapTo(&Repository.PullRequest); err != nil {
+ log.Fatal("Failed to map Repository.PullRequest settings: %v", err)
+ }
+
+ if !rootCfg.Section("packages").Key("ENABLED").MustBool(Packages.Enabled) {
+ Repository.DisabledRepoUnits = append(Repository.DisabledRepoUnits, "repo.packages")
+ }
+
+ if !rootCfg.Section("actions").Key("ENABLED").MustBool(Actions.Enabled) {
+ Repository.DisabledRepoUnits = append(Repository.DisabledRepoUnits, "repo.actions")
+ }
+
+ // Handle default trust model settings
+ Repository.Signing.DefaultTrustModel = strings.ToLower(strings.TrimSpace(Repository.Signing.DefaultTrustModel))
+ if Repository.Signing.DefaultTrustModel == "default" {
+ Repository.Signing.DefaultTrustModel = "collaborator"
+ }
+
+ // Handle preferred charset orders
+ preferred := make([]string, 0, len(Repository.DetectedCharsetsOrder))
+ for _, charset := range Repository.DetectedCharsetsOrder {
+ canonicalCharset := strings.ToLower(strings.TrimSpace(charset))
+ preferred = append(preferred, canonicalCharset)
+ // remove it from the defaults
+ for i, charset := range defaultDetectedCharsetsOrder {
+ if charset == canonicalCharset {
+ defaultDetectedCharsetsOrder = append(defaultDetectedCharsetsOrder[:i], defaultDetectedCharsetsOrder[i+1:]...)
+ break
+ }
+ }
+ }
+
+ i := 0
+ for _, charset := range preferred {
+ // Add the defaults
+ if charset == "defaults" {
+ for _, charset := range defaultDetectedCharsetsOrder {
+ canonicalCharset := strings.ToLower(strings.TrimSpace(charset))
+ if _, has := Repository.DetectedCharsetScore[canonicalCharset]; !has {
+ Repository.DetectedCharsetScore[canonicalCharset] = i
+ i++
+ }
+ }
+ continue
+ }
+ if _, has := Repository.DetectedCharsetScore[charset]; !has {
+ Repository.DetectedCharsetScore[charset] = i
+ i++
+ }
+ }
+
+ if !filepath.IsAbs(Repository.Upload.TempPath) {
+ Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath)
+ }
+
+ if err := loadRepoArchiveFrom(rootCfg); err != nil {
+ log.Fatal("loadRepoArchiveFrom: %v", err)
+ }
+ Repository.EnableFlags = sec.Key("ENABLE_FLAGS").MustBool()
+}
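+
+// Illustrative note on the charset scoring above: a configuration such as
+//
+//	[repository]
+//	DETECTED_CHARSETS_ORDER = UTF-8, defaults
+//
+// scores UTF-8 first and then appends the remaining built-in charsets at the
+// position where the "defaults" placeholder appears.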
diff --git a/modules/setting/repository_archive.go b/modules/setting/repository_archive.go
new file mode 100644
index 0000000..9d24afa
--- /dev/null
+++ b/modules/setting/repository_archive.go
@@ -0,0 +1,25 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import "fmt"
+
+var RepoArchive = struct {
+ Storage *Storage
+}{}
+
+func loadRepoArchiveFrom(rootCfg ConfigProvider) (err error) {
+ sec, _ := rootCfg.GetSection("repo-archive")
+ if sec == nil {
+ RepoArchive.Storage, err = getStorage(rootCfg, "repo-archive", "", nil)
+ return err
+ }
+
+ if err := sec.MapTo(&RepoArchive); err != nil {
+ return fmt.Errorf("mapto repoarchive failed: %v", err)
+ }
+
+ RepoArchive.Storage, err = getStorage(rootCfg, "repo-archive", "", sec)
+ return err
+}
diff --git a/modules/setting/repository_archive_test.go b/modules/setting/repository_archive_test.go
new file mode 100644
index 0000000..d3901b6
--- /dev/null
+++ b/modules/setting/repository_archive_test.go
@@ -0,0 +1,112 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getStorageInheritNameSectionTypeForRepoArchive(t *testing.T) {
+ // repo-archive storage inherits from [storage] if nothing is configured
+ iniStr := `
+[storage]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+
+ assert.EqualValues(t, "minio", RepoArchive.Storage.Type)
+ assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+
+ // we can also configure repo-archive storage directly
+ iniStr = `
+[storage.repo-archive]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+
+ assert.EqualValues(t, "minio", RepoArchive.Storage.Type)
+ assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+
+ // or we can indicate the storage type in the repo-archive section
+ iniStr = `
+[repo-archive]
+STORAGE_TYPE = my_minio
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+
+ assert.EqualValues(t, "minio", RepoArchive.Storage.Type)
+ assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+
+ // or we can indicate the storage type and minio base path in the repo-archive section
+ iniStr = `
+[repo-archive]
+STORAGE_TYPE = my_minio
+MINIO_BASE_PATH = my_archive/
+
+[storage.my_minio]
+STORAGE_TYPE = minio
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+
+ assert.EqualValues(t, "minio", RepoArchive.Storage.Type)
+ assert.EqualValues(t, "my_archive/", RepoArchive.Storage.MinioConfig.BasePath)
+}
+
+func Test_RepoArchiveStorage(t *testing.T) {
+ iniStr := `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[storage]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ storage := RepoArchive.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+
+ iniStr = `
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+[storage.repo-archive]
+STORAGE_TYPE = s3
+[storage.s3]
+STORAGE_TYPE = minio
+MINIO_ENDPOINT = s3.my-domain.net
+MINIO_BUCKET = gitea
+MINIO_LOCATION = homenet
+MINIO_USE_SSL = true
+MINIO_ACCESS_KEY_ID = correct_key
+MINIO_SECRET_ACCESS_KEY = correct_key
+`
+ cfg, err = NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ storage = RepoArchive.Storage
+
+ assert.EqualValues(t, "minio", storage.Type)
+ assert.EqualValues(t, "gitea", storage.MinioConfig.Bucket)
+}
diff --git a/modules/setting/security.go b/modules/setting/security.go
new file mode 100644
index 0000000..678a57c
--- /dev/null
+++ b/modules/setting/security.go
@@ -0,0 +1,173 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/url"
+ "os"
+ "strings"
+
+ "code.gitea.io/gitea/modules/auth/password/hash"
+ "code.gitea.io/gitea/modules/generate"
+ "code.gitea.io/gitea/modules/keying"
+ "code.gitea.io/gitea/modules/log"
+)
+
+var (
+ // Security settings
+ InstallLock bool
+ SecretKey string
+ InternalToken string // internal access token
+ LogInRememberDays int
+ CookieRememberName string
+ ReverseProxyAuthUser string
+ ReverseProxyAuthEmail string
+ ReverseProxyAuthFullName string
+ ReverseProxyLimit int
+ ReverseProxyTrustedProxies []string
+ MinPasswordLength int
+ ImportLocalPaths bool
+ DisableGitHooks bool
+ DisableWebhooks bool
+ OnlyAllowPushIfGiteaEnvironmentSet bool
+ PasswordComplexity []string
+ PasswordHashAlgo string
+ PasswordCheckPwn bool
+ SuccessfulTokensCacheSize int
+ DisableQueryAuthToken bool
+ CSRFCookieName = "_csrf"
+ CSRFCookieHTTPOnly = true
+)
+
+// loadSecret loads the secret from the ini by uriKey or verbatimKey; only one of them may be set.
+// If the secret is loaded from uriKey (a file), the file must be non-empty to keep the behavior stable and unambiguous.
+func loadSecret(sec ConfigSection, uriKey, verbatimKey string) string {
+ // don't allow setting both URI and verbatim string
+ uri := sec.Key(uriKey).String()
+ verbatim := sec.Key(verbatimKey).String()
+ if uri != "" && verbatim != "" {
+ log.Fatal("Cannot specify both %s and %s", uriKey, verbatimKey)
+ }
+
+ // if we have no URI, use verbatim
+ if uri == "" {
+ return verbatim
+ }
+
+ tempURI, err := url.Parse(uri)
+ if err != nil {
+ log.Fatal("Failed to parse %s (%s): %v", uriKey, uri, err)
+ }
+ switch tempURI.Scheme {
+ case "file":
+ buf, err := os.ReadFile(tempURI.RequestURI())
+ if err != nil {
+ log.Fatal("Failed to read %s (%s): %v", uriKey, tempURI.RequestURI(), err)
+ }
+ val := strings.TrimSpace(string(buf))
+ if val == "" {
+ // The file must not be empty; otherwise we cannot tell whether the user ever set KEY or KEY_URI.
+ // For example: if INTERNAL_TOKEN_URI=file:///empty-file,
+ // and the token is then re-generated during installation and saved to INTERNAL_TOKEN,
+ // both INTERNAL_TOKEN and INTERNAL_TOKEN_URI would be set, which is a fatal error (they shouldn't be)
+ log.Fatal("Failed to read %s (%s): the file is empty", uriKey, tempURI.RequestURI())
+ }
+ return val
+
+ // only file URIs are allowed
+ default:
+ log.Fatal("Unsupported URI-Scheme %q (%q = %q)", tempURI.Scheme, uriKey, uri)
+ return ""
+ }
+}
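+
+// Illustrative use of the URI form (the path is an example only): with
+//
+//	[security]
+//	INTERNAL_TOKEN_URI = file:/etc/forgejo/internal_token
+//
+// the token is read from that file, which must exist and be non-empty;
+// setting INTERNAL_TOKEN at the same time is a fatal configuration error.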
+
+// generateSaveInternalToken generates and saves the internal token to app.ini
+func generateSaveInternalToken(rootCfg ConfigProvider) {
+ token, err := generate.NewInternalToken()
+ if err != nil {
+ log.Fatal("Error generate internal token: %v", err)
+ }
+
+ InternalToken = token
+ saveCfg, err := rootCfg.PrepareSaving()
+ if err != nil {
+ log.Fatal("Error saving internal token: %v", err)
+ }
+ rootCfg.Section("security").Key("INTERNAL_TOKEN").SetValue(token)
+ saveCfg.Section("security").Key("INTERNAL_TOKEN").SetValue(token)
+ if err = saveCfg.Save(); err != nil {
+ log.Fatal("Error saving internal token: %v", err)
+ }
+}
+
+func loadSecurityFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("security")
+ InstallLock = HasInstallLock(rootCfg)
+ LogInRememberDays = sec.Key("LOGIN_REMEMBER_DAYS").MustInt(31)
+ SecretKey = loadSecret(sec, "SECRET_KEY_URI", "SECRET_KEY")
+ if SecretKey == "" {
+ // FIXME: https://github.com/go-gitea/gitea/issues/16832
+ // Until it supports rotating an existing secret key, we shouldn't move users off of the widely used default value
+ SecretKey = "!#@FDEWREWR&*(" //nolint:gosec
+ }
+ keying.Init([]byte(SecretKey))
+
+ CookieRememberName = sec.Key("COOKIE_REMEMBER_NAME").MustString("gitea_incredible")
+
+ ReverseProxyAuthUser = sec.Key("REVERSE_PROXY_AUTHENTICATION_USER").MustString("X-WEBAUTH-USER")
+ ReverseProxyAuthEmail = sec.Key("REVERSE_PROXY_AUTHENTICATION_EMAIL").MustString("X-WEBAUTH-EMAIL")
+ ReverseProxyAuthFullName = sec.Key("REVERSE_PROXY_AUTHENTICATION_FULL_NAME").MustString("X-WEBAUTH-FULLNAME")
+
+ ReverseProxyLimit = sec.Key("REVERSE_PROXY_LIMIT").MustInt(1)
+ ReverseProxyTrustedProxies = sec.Key("REVERSE_PROXY_TRUSTED_PROXIES").Strings(",")
+ if len(ReverseProxyTrustedProxies) == 0 {
+ ReverseProxyTrustedProxies = []string{"127.0.0.0/8", "::1/128"}
+ }
+
+ MinPasswordLength = sec.Key("MIN_PASSWORD_LENGTH").MustInt(8)
+ ImportLocalPaths = sec.Key("IMPORT_LOCAL_PATHS").MustBool(false)
+ DisableGitHooks = sec.Key("DISABLE_GIT_HOOKS").MustBool(true)
+ DisableWebhooks = sec.Key("DISABLE_WEBHOOKS").MustBool(false)
+ OnlyAllowPushIfGiteaEnvironmentSet = sec.Key("ONLY_ALLOW_PUSH_IF_GITEA_ENVIRONMENT_SET").MustBool(true)
+
+ // Ensure that the provided default hash algorithm is a valid hash algorithm
+ var algorithm *hash.PasswordHashAlgorithm
+ PasswordHashAlgo, algorithm = hash.SetDefaultPasswordHashAlgorithm(sec.Key("PASSWORD_HASH_ALGO").MustString(""))
+ if algorithm == nil {
+ log.Fatal("The provided password hash algorithm was invalid: %s", sec.Key("PASSWORD_HASH_ALGO").MustString(""))
+ }
+
+ CSRFCookieHTTPOnly = sec.Key("CSRF_COOKIE_HTTP_ONLY").MustBool(true)
+ PasswordCheckPwn = sec.Key("PASSWORD_CHECK_PWN").MustBool(false)
+ SuccessfulTokensCacheSize = sec.Key("SUCCESSFUL_TOKENS_CACHE_SIZE").MustInt(20)
+
+ InternalToken = loadSecret(sec, "INTERNAL_TOKEN_URI", "INTERNAL_TOKEN")
+ if InstallLock && InternalToken == "" {
+ // if Gitea has been installed but the InternalToken hasn't been generated (upgrade from an old release), we should generate one;
+ // some users run cluster deployments and still depend on this auto-generation behavior.
+ generateSaveInternalToken(rootCfg)
+ }
+
+ cfgdata := sec.Key("PASSWORD_COMPLEXITY").Strings(",")
+ if len(cfgdata) == 0 {
+ cfgdata = []string{"off"}
+ }
+ PasswordComplexity = make([]string, 0, len(cfgdata))
+ for _, name := range cfgdata {
+ name := strings.ToLower(strings.Trim(name, `"`))
+ if name != "" {
+ PasswordComplexity = append(PasswordComplexity, name)
+ }
+ }
+
+ sectionHasDisableQueryAuthToken := sec.HasKey("DISABLE_QUERY_AUTH_TOKEN")
+
+ // TODO: default value should be true in future releases
+ DisableQueryAuthToken = sec.Key("DISABLE_QUERY_AUTH_TOKEN").MustBool(false)
+
+ // warn if the setting is set to false explicitly
+ if sectionHasDisableQueryAuthToken && !DisableQueryAuthToken {
+ log.Warn("Enabling Query API Auth tokens is not recommended. DISABLE_QUERY_AUTH_TOKEN will default to true in gitea 1.23 and will be removed in gitea 1.24.")
+ }
+}
diff --git a/modules/setting/server.go b/modules/setting/server.go
new file mode 100644
index 0000000..5cc33f6
--- /dev/null
+++ b/modules/setting/server.go
@@ -0,0 +1,368 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "encoding/base64"
+ "net"
+ "net/url"
+ "path"
+ "path/filepath"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+)
+
+// Scheme describes protocol types
+type Scheme string
+
+// enumerates all the scheme types
+const (
+ HTTP Scheme = "http"
+ HTTPS Scheme = "https"
+ FCGI Scheme = "fcgi"
+ FCGIUnix Scheme = "fcgi+unix"
+ HTTPUnix Scheme = "http+unix"
+)
+
+// LandingPage describes the default page
+type LandingPage string
+
+// enumerates all the landing page types
+const (
+ LandingPageHome LandingPage = "/"
+ LandingPageExplore LandingPage = "/explore"
+ LandingPageOrganizations LandingPage = "/explore/organizations"
+ LandingPageLogin LandingPage = "/user/login"
+)
+
+var (
+ // AppName is the Application name, used in the page title.
+ // It maps to ini:"APP_NAME"
+ AppName string
+ // AppSlogan is the Application slogan.
+ // It maps to ini:"APP_SLOGAN"
+ AppSlogan string
+ // AppDisplayNameFormat defines how the AppDisplayName should be presented
+ // It maps to ini:"APP_DISPLAY_NAME_FORMAT"
+ AppDisplayNameFormat string
+ // AppDisplayName is the display name for the application, defined following AppDisplayNameFormat
+ AppDisplayName string
+ // AppURL is the Application ROOT_URL. It always has a '/' suffix
+ // It maps to ini:"ROOT_URL"
+ AppURL string
+ // AppSubURL represents the sub-url mounting point for gitea. It is either "" or starts with '/' and ends without '/', such as '/{subpath}'.
+ // This value is empty if site does not have sub-url.
+ AppSubURL string
+ // AppDataPath is the default path for storing data.
+ // It maps to ini:"APP_DATA_PATH" in [server] and defaults to AppWorkPath + "/data"
+ AppDataPath string
+ // LocalURL is the url for locally running applications to contact Gitea. It always has a '/' suffix
+ // It maps to ini:"LOCAL_ROOT_URL" in [server]
+ LocalURL string
+ // AssetVersion holds an opaque value that is used for cache-busting assets
+ AssetVersion string
+
+ // Server settings
+
+ Protocol Scheme
+ UseProxyProtocol bool // `ini:"USE_PROXY_PROTOCOL"`
+ ProxyProtocolTLSBridging bool // `ini:"PROXY_PROTOCOL_TLS_BRIDGING"`
+ ProxyProtocolHeaderTimeout time.Duration
+ ProxyProtocolAcceptUnknown bool
+ Domain string
+ HTTPAddr string
+ HTTPPort string
+ LocalUseProxyProtocol bool
+ RedirectOtherPort bool
+ RedirectorUseProxyProtocol bool
+ PortToRedirect string
+ OfflineMode bool
+ CertFile string
+ KeyFile string
+ StaticRootPath string
+ StaticCacheTime time.Duration
+ EnableGzip bool
+ LandingPageURL LandingPage
+ UnixSocketPermission uint32
+ EnablePprof bool
+ PprofDataPath string
+ EnableAcme bool
+ AcmeTOS bool
+ AcmeLiveDirectory string
+ AcmeEmail string
+ AcmeURL string
+ AcmeCARoot string
+ SSLMinimumVersion string
+ SSLMaximumVersion string
+ SSLCurvePreferences []string
+ SSLCipherSuites []string
+ GracefulRestartable bool
+ GracefulHammerTime time.Duration
+ StartupTimeout time.Duration
+ PerWriteTimeout = 30 * time.Second
+ PerWritePerKbTimeout = 10 * time.Second
+ StaticURLPrefix string
+ AbsoluteAssetURL string
+
+ ManifestData string
+)
+
+// MakeManifestData generates web app manifest JSON
+func MakeManifestData(appName, appURL, absoluteAssetURL string) []byte {
+ type manifestIcon struct {
+ Src string `json:"src"`
+ Type string `json:"type"`
+ Sizes string `json:"sizes"`
+ }
+
+ type manifestJSON struct {
+ Name string `json:"name"`
+ ShortName string `json:"short_name"`
+ StartURL string `json:"start_url"`
+ Icons []manifestIcon `json:"icons"`
+ }
+
+ bytes, err := json.Marshal(&manifestJSON{
+ Name: appName,
+ ShortName: appName,
+ StartURL: appURL,
+ Icons: []manifestIcon{
+ {
+ Src: absoluteAssetURL + "/assets/img/logo.png",
+ Type: "image/png",
+ Sizes: "512x512",
+ },
+ {
+ Src: absoluteAssetURL + "/assets/img/logo.svg",
+ Type: "image/svg+xml",
+ Sizes: "512x512",
+ },
+ },
+ })
+ if err != nil {
+ log.Error("unable to marshal manifest JSON. Error: %v", err)
+ return make([]byte, 0)
+ }
+
+ return bytes
+}
+
+// MakeAbsoluteAssetURL returns the absolute asset url prefix without a trailing slash
+func MakeAbsoluteAssetURL(appURL, staticURLPrefix string) string {
+ parsedPrefix, err := url.Parse(strings.TrimSuffix(staticURLPrefix, "/"))
+ if err != nil {
+ log.Fatal("Unable to parse STATIC_URL_PREFIX: %v", err)
+ }
+
+ if err == nil && parsedPrefix.Hostname() == "" {
+ if staticURLPrefix == "" {
+ return strings.TrimSuffix(appURL, "/")
+ }
+
+ // StaticURLPrefix is just a path
+ return util.URLJoin(appURL, strings.TrimSuffix(staticURLPrefix, "/"))
+ }
+
+ return strings.TrimSuffix(staticURLPrefix, "/")
+}
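+
+// For illustration (example URLs only): with appURL "https://example.com/",
+// an empty prefix yields "https://example.com", a path-only prefix such as
+// "/assets" yields "https://example.com/assets", and a full URL such as
+// "https://cdn.example.com/" is returned as "https://cdn.example.com".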
+
+func generateDisplayName() string {
+ appDisplayName := AppName
+ if AppSlogan != "" {
+ appDisplayName = strings.Replace(AppDisplayNameFormat, "{APP_NAME}", AppName, 1)
+ appDisplayName = strings.Replace(appDisplayName, "{APP_SLOGAN}", AppSlogan, 1)
+ }
+ return appDisplayName
+}
+
+func loadServerFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("server")
+ AppName = rootCfg.Section("").Key("APP_NAME").MustString("Forgejo: Beyond coding. We Forge.")
+ AppSlogan = rootCfg.Section("").Key("APP_SLOGAN").MustString("")
+ AppDisplayNameFormat = rootCfg.Section("").Key("APP_DISPLAY_NAME_FORMAT").MustString("{APP_NAME}: {APP_SLOGAN}")
+ AppDisplayName = generateDisplayName()
+ Domain = sec.Key("DOMAIN").MustString("localhost")
+ HTTPAddr = sec.Key("HTTP_ADDR").MustString("0.0.0.0")
+ HTTPPort = sec.Key("HTTP_PORT").MustString("3000")
+
+ Protocol = HTTP
+ protocolCfg := sec.Key("PROTOCOL").String()
+ switch protocolCfg {
+ case "https":
+ Protocol = HTTPS
+
+ // DEPRECATED: should not be removed, because users may upgrade from a lower version to the latest version;
+ // if these are removed, the warning will not be shown
+ if sec.HasKey("ENABLE_ACME") {
+ EnableAcme = sec.Key("ENABLE_ACME").MustBool(false)
+ } else {
+ deprecatedSetting(rootCfg, "server", "ENABLE_LETSENCRYPT", "server", "ENABLE_ACME", "v1.19.0")
+ EnableAcme = sec.Key("ENABLE_LETSENCRYPT").MustBool(false)
+ }
+ if EnableAcme {
+ AcmeURL = sec.Key("ACME_URL").MustString("")
+ AcmeCARoot = sec.Key("ACME_CA_ROOT").MustString("")
+
+ if sec.HasKey("ACME_ACCEPTTOS") {
+ AcmeTOS = sec.Key("ACME_ACCEPTTOS").MustBool(false)
+ } else {
+ deprecatedSetting(rootCfg, "server", "LETSENCRYPT_ACCEPTTOS", "server", "ACME_ACCEPTTOS", "v1.19.0")
+ AcmeTOS = sec.Key("LETSENCRYPT_ACCEPTTOS").MustBool(false)
+ }
+ if !AcmeTOS {
+ log.Fatal("ACME TOS is not accepted (ACME_ACCEPTTOS).")
+ }
+
+ if sec.HasKey("ACME_DIRECTORY") {
+ AcmeLiveDirectory = sec.Key("ACME_DIRECTORY").MustString("https")
+ } else {
+ deprecatedSetting(rootCfg, "server", "LETSENCRYPT_DIRECTORY", "server", "ACME_DIRECTORY", "v1.19.0")
+ AcmeLiveDirectory = sec.Key("LETSENCRYPT_DIRECTORY").MustString("https")
+ }
+
+ if sec.HasKey("ACME_EMAIL") {
+ AcmeEmail = sec.Key("ACME_EMAIL").MustString("")
+ } else {
+ deprecatedSetting(rootCfg, "server", "LETSENCRYPT_EMAIL", "server", "ACME_EMAIL", "v1.19.0")
+ AcmeEmail = sec.Key("LETSENCRYPT_EMAIL").MustString("")
+ }
+ } else {
+ CertFile = sec.Key("CERT_FILE").String()
+ KeyFile = sec.Key("KEY_FILE").String()
+ if len(CertFile) > 0 && !filepath.IsAbs(CertFile) {
+ CertFile = filepath.Join(CustomPath, CertFile)
+ }
+ if len(KeyFile) > 0 && !filepath.IsAbs(KeyFile) {
+ KeyFile = filepath.Join(CustomPath, KeyFile)
+ }
+ }
+ SSLMinimumVersion = sec.Key("SSL_MIN_VERSION").MustString("")
+ SSLMaximumVersion = sec.Key("SSL_MAX_VERSION").MustString("")
+ SSLCurvePreferences = sec.Key("SSL_CURVE_PREFERENCES").Strings(",")
+ SSLCipherSuites = sec.Key("SSL_CIPHER_SUITES").Strings(",")
+ case "fcgi":
+ Protocol = FCGI
+ case "fcgi+unix", "unix", "http+unix":
+ switch protocolCfg {
+ case "fcgi+unix":
+ Protocol = FCGIUnix
+ case "unix":
+ log.Warn("unix PROTOCOL value is deprecated, please use http+unix")
+ fallthrough
+ case "http+unix":
+ Protocol = HTTPUnix
+ }
+ UnixSocketPermissionRaw := sec.Key("UNIX_SOCKET_PERMISSION").MustString("666")
+ UnixSocketPermissionParsed, err := strconv.ParseUint(UnixSocketPermissionRaw, 8, 32)
+ if err != nil || UnixSocketPermissionParsed > 0o777 {
+ log.Fatal("Failed to parse unixSocketPermission: %s", UnixSocketPermissionRaw)
+ }
+
+ UnixSocketPermission = uint32(UnixSocketPermissionParsed)
+ if !filepath.IsAbs(HTTPAddr) {
+ HTTPAddr = filepath.Join(AppWorkPath, HTTPAddr)
+ }
+ }
+ UseProxyProtocol = sec.Key("USE_PROXY_PROTOCOL").MustBool(false)
+ ProxyProtocolTLSBridging = sec.Key("PROXY_PROTOCOL_TLS_BRIDGING").MustBool(false)
+ ProxyProtocolHeaderTimeout = sec.Key("PROXY_PROTOCOL_HEADER_TIMEOUT").MustDuration(5 * time.Second)
+ ProxyProtocolAcceptUnknown = sec.Key("PROXY_PROTOCOL_ACCEPT_UNKNOWN").MustBool(false)
+ GracefulRestartable = sec.Key("ALLOW_GRACEFUL_RESTARTS").MustBool(true)
+ GracefulHammerTime = sec.Key("GRACEFUL_HAMMER_TIME").MustDuration(60 * time.Second)
+ StartupTimeout = sec.Key("STARTUP_TIMEOUT").MustDuration(0 * time.Second)
+ PerWriteTimeout = sec.Key("PER_WRITE_TIMEOUT").MustDuration(PerWriteTimeout)
+ PerWritePerKbTimeout = sec.Key("PER_WRITE_PER_KB_TIMEOUT").MustDuration(PerWritePerKbTimeout)
+
+ defaultAppURL := string(Protocol) + "://" + Domain + ":" + HTTPPort
+ AppURL = sec.Key("ROOT_URL").MustString(defaultAppURL)
+
+ // Check validity of AppURL
+ appURL, err := url.Parse(AppURL)
+ if err != nil {
+ log.Fatal("Invalid ROOT_URL '%s': %s", AppURL, err)
+ }
+ // Remove default ports from AppURL.
+ // (scheme-based URL normalization, RFC 3986 section 6.2.3)
+ if (appURL.Scheme == string(HTTP) && appURL.Port() == "80") || (appURL.Scheme == string(HTTPS) && appURL.Port() == "443") {
+ appURL.Host = appURL.Hostname()
+ }
+ // This should be TrimRight to ensure that there is only a single '/' at the end of AppURL.
+ AppURL = strings.TrimRight(appURL.String(), "/") + "/"
+
+ // Suburl should start with '/' and end without '/', such as '/{subpath}'.
+ // This value is empty if site does not have sub-url.
+ AppSubURL = strings.TrimSuffix(appURL.Path, "/")
+ StaticURLPrefix = strings.TrimSuffix(sec.Key("STATIC_URL_PREFIX").MustString(AppSubURL), "/")
+
+ // If Domain differs from the AppURL domain, update it to AppURL's domain
+ urlHostname := appURL.Hostname()
+ if urlHostname != Domain && net.ParseIP(urlHostname) == nil && urlHostname != "" {
+ Domain = urlHostname
+ }
+
+ AbsoluteAssetURL = MakeAbsoluteAssetURL(AppURL, StaticURLPrefix)
+ AssetVersion = strings.ReplaceAll(AppVer, "+", "~") // make sure the version string is clear (no real escaping is needed)
+
+ manifestBytes := MakeManifestData(AppName, AppURL, AbsoluteAssetURL)
+ ManifestData = `application/json;base64,` + base64.StdEncoding.EncodeToString(manifestBytes)
+
+ var defaultLocalURL string
+ switch Protocol {
+ case HTTPUnix:
+ defaultLocalURL = "http://unix/"
+ case FCGI:
+ defaultLocalURL = AppURL
+ case FCGIUnix:
+ defaultLocalURL = AppURL
+ default:
+ defaultLocalURL = string(Protocol) + "://"
+ if HTTPAddr == "0.0.0.0" {
+ defaultLocalURL += net.JoinHostPort("localhost", HTTPPort) + "/"
+ } else {
+ defaultLocalURL += net.JoinHostPort(HTTPAddr, HTTPPort) + "/"
+ }
+ }
+ LocalURL = sec.Key("LOCAL_ROOT_URL").MustString(defaultLocalURL)
+ LocalURL = strings.TrimRight(LocalURL, "/") + "/"
+ LocalUseProxyProtocol = sec.Key("LOCAL_USE_PROXY_PROTOCOL").MustBool(UseProxyProtocol)
+ RedirectOtherPort = sec.Key("REDIRECT_OTHER_PORT").MustBool(false)
+ PortToRedirect = sec.Key("PORT_TO_REDIRECT").MustString("80")
+ RedirectorUseProxyProtocol = sec.Key("REDIRECTOR_USE_PROXY_PROTOCOL").MustBool(UseProxyProtocol)
+ OfflineMode = sec.Key("OFFLINE_MODE").MustBool(true)
+ if len(StaticRootPath) == 0 {
+ StaticRootPath = AppWorkPath
+ }
+ StaticRootPath = sec.Key("STATIC_ROOT_PATH").MustString(StaticRootPath)
+ StaticCacheTime = sec.Key("STATIC_CACHE_TIME").MustDuration(6 * time.Hour)
+ AppDataPath = sec.Key("APP_DATA_PATH").MustString(path.Join(AppWorkPath, "data"))
+ if !filepath.IsAbs(AppDataPath) {
+ AppDataPath = filepath.ToSlash(filepath.Join(AppWorkPath, AppDataPath))
+ }
+
+ EnableGzip = sec.Key("ENABLE_GZIP").MustBool()
+ EnablePprof = sec.Key("ENABLE_PPROF").MustBool(false)
+ PprofDataPath = sec.Key("PPROF_DATA_PATH").MustString(path.Join(AppWorkPath, "data/tmp/pprof"))
+ if !filepath.IsAbs(PprofDataPath) {
+ PprofDataPath = filepath.Join(AppWorkPath, PprofDataPath)
+ }
+
+ landingPage := sec.Key("LANDING_PAGE").MustString("home")
+ switch landingPage {
+ case "explore":
+ LandingPageURL = LandingPageExplore
+ case "organizations":
+ LandingPageURL = LandingPageOrganizations
+ case "login":
+ LandingPageURL = LandingPageLogin
+ case "", "home":
+ LandingPageURL = LandingPageHome
+ default:
+ LandingPageURL = LandingPage(landingPage)
+ }
+}
diff --git a/modules/setting/server_test.go b/modules/setting/server_test.go
new file mode 100644
index 0000000..8db8168
--- /dev/null
+++ b/modules/setting/server_test.go
@@ -0,0 +1,36 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDisplayNameDefault(t *testing.T) {
+ defer test.MockVariableValue(&AppName, "Forgejo")()
+ defer test.MockVariableValue(&AppSlogan, "Beyond coding. We Forge.")()
+ defer test.MockVariableValue(&AppDisplayNameFormat, "{APP_NAME}: {APP_SLOGAN}")()
+ displayName := generateDisplayName()
+ assert.Equal(t, "Forgejo: Beyond coding. We Forge.", displayName)
+}
+
+func TestDisplayNameEmptySlogan(t *testing.T) {
+ defer test.MockVariableValue(&AppName, "Forgejo")()
+ defer test.MockVariableValue(&AppSlogan, "")()
+ defer test.MockVariableValue(&AppDisplayNameFormat, "{APP_NAME}: {APP_SLOGAN}")()
+ displayName := generateDisplayName()
+ assert.Equal(t, "Forgejo", displayName)
+}
+
+func TestDisplayNameCustomFormat(t *testing.T) {
+ defer test.MockVariableValue(&AppName, "Forgejo")()
+ defer test.MockVariableValue(&AppSlogan, "Beyond coding. We Forge.")()
+ defer test.MockVariableValue(&AppDisplayNameFormat, "{APP_NAME} - {APP_SLOGAN}")()
+ displayName := generateDisplayName()
+ assert.Equal(t, "Forgejo - Beyond coding. We Forge.", displayName)
+}
diff --git a/modules/setting/service.go b/modules/setting/service.go
new file mode 100644
index 0000000..afaee18
--- /dev/null
+++ b/modules/setting/service.go
@@ -0,0 +1,262 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "regexp"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/gobwas/glob"
+)
+
+// enumerates all the types of captchas
+const (
+ ImageCaptcha = "image"
+ ReCaptcha = "recaptcha"
+ HCaptcha = "hcaptcha"
+ MCaptcha = "mcaptcha"
+ CfTurnstile = "cfturnstile"
+)
+
+// Service settings
+var Service = struct {
+ DefaultUserVisibility string
+ DefaultUserVisibilityMode structs.VisibleType
+ AllowedUserVisibilityModes []string
+ AllowedUserVisibilityModesSlice AllowedVisibility `ini:"-"`
+ DefaultOrgVisibility string
+ DefaultOrgVisibilityMode structs.VisibleType
+ ActiveCodeLives int
+ ResetPwdCodeLives int
+ RegisterEmailConfirm bool
+ RegisterManualConfirm bool
+ EmailDomainAllowList []glob.Glob
+ EmailDomainBlockList []glob.Glob
+ DisableRegistration bool
+ AllowOnlyInternalRegistration bool
+ AllowOnlyExternalRegistration bool
+ ShowRegistrationButton bool
+ ShowMilestonesDashboardPage bool
+ RequireSignInView bool
+ EnableNotifyMail bool
+ EnableBasicAuth bool
+ EnableReverseProxyAuth bool
+ EnableReverseProxyAuthAPI bool
+ EnableReverseProxyAutoRegister bool
+ EnableReverseProxyEmail bool
+ EnableReverseProxyFullName bool
+ EnableCaptcha bool
+ RequireCaptchaForLogin bool
+ RequireExternalRegistrationCaptcha bool
+ RequireExternalRegistrationPassword bool
+ CaptchaType string
+ RecaptchaSecret string
+ RecaptchaSitekey string
+ RecaptchaURL string
+ CfTurnstileSecret string
+ CfTurnstileSitekey string
+ HcaptchaSecret string
+ HcaptchaSitekey string
+ McaptchaSecret string
+ McaptchaSitekey string
+ McaptchaURL string
+ DefaultKeepEmailPrivate bool
+ DefaultAllowCreateOrganization bool
+ DefaultUserIsRestricted bool
+ AllowDotsInUsernames bool
+ EnableTimetracking bool
+ DefaultEnableTimetracking bool
+ DefaultEnableDependencies bool
+ AllowCrossRepositoryDependencies bool
+ DefaultAllowOnlyContributorsToTrackTime bool
+ NoReplyAddress string
+ UserLocationMapURL string
+ EnableUserHeatmap bool
+ AutoWatchNewRepos bool
+ AutoWatchOnChanges bool
+ DefaultOrgMemberVisible bool
+ UserDeleteWithCommentsMaxTime time.Duration
+ ValidSiteURLSchemes []string
+
+ // OpenID settings
+ EnableOpenIDSignIn bool
+ EnableOpenIDSignUp bool
+ OpenIDWhitelist []*regexp.Regexp
+ OpenIDBlacklist []*regexp.Regexp
+
+ // Explore page settings
+ Explore struct {
+ RequireSigninView bool `ini:"REQUIRE_SIGNIN_VIEW"`
+ DisableUsersPage bool `ini:"DISABLE_USERS_PAGE"`
+ } `ini:"service.explore"`
+}{
+ AllowedUserVisibilityModesSlice: []bool{true, true, true},
+}
+
+// AllowedVisibility stores, in a three-item bool array, which visibility modes are allowed
+type AllowedVisibility []bool
+
+// IsAllowedVisibility checks whether an AllowedVisibility allows a specific VisibleType
+func (a AllowedVisibility) IsAllowedVisibility(t structs.VisibleType) bool {
+ if int(t) >= len(a) {
+ return false
+ }
+ return a[t]
+}
+
+// ToVisibleTypeSlice converts an AllowedVisibility into a VisibleType slice
+func (a AllowedVisibility) ToVisibleTypeSlice() (result []structs.VisibleType) {
+ for i, v := range a {
+ if v {
+ result = append(result, structs.VisibleType(i))
+ }
+ }
+ return result
+}
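+
+// Illustrative example (not additional behavior): with
+// ALLOWED_USER_VISIBILITY_MODES = public,private the slice becomes
+// []bool{true, false, true}, since indices follow structs.VisibleType as currently
+// defined (0 public, 1 limited, 2 private), so IsAllowedVisibility(structs.VisibleTypeLimited)
+// returns false.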
+
+// CompileEmailGlobList compiles the comma-separated glob patterns stored under the
+// given keys into a list of globs; invalid patterns are logged and skipped.
+func CompileEmailGlobList(sec ConfigSection, keys ...string) (globs []glob.Glob) {
+ for _, key := range keys {
+ list := sec.Key(key).Strings(",")
+ for _, s := range list {
+ if g, err := glob.Compile(s); err == nil {
+ globs = append(globs, g)
+ } else {
+ log.Error("Skip invalid email allow/block list expression %q: %v", s, err)
+ }
+ }
+ }
+ return globs
+}
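+
+// Illustrative example: EMAIL_DOMAIN_ALLOWLIST = example.org,*.example.org compiles
+// into two globs, so both "example.org" and a subdomain such as "mail.example.org"
+// match, while "other.org" matches neither (glob semantics as implemented by gobwas/glob).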
+
+func loadServiceFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("service")
+ Service.ActiveCodeLives = sec.Key("ACTIVE_CODE_LIVE_MINUTES").MustInt(180)
+ Service.ResetPwdCodeLives = sec.Key("RESET_PASSWD_CODE_LIVE_MINUTES").MustInt(180)
+ Service.DisableRegistration = sec.Key("DISABLE_REGISTRATION").MustBool()
+ Service.AllowOnlyInternalRegistration = sec.Key("ALLOW_ONLY_INTERNAL_REGISTRATION").MustBool()
+ Service.AllowOnlyExternalRegistration = sec.Key("ALLOW_ONLY_EXTERNAL_REGISTRATION").MustBool()
+ if Service.AllowOnlyExternalRegistration && Service.AllowOnlyInternalRegistration {
+ log.Warn("ALLOW_ONLY_INTERNAL_REGISTRATION and ALLOW_ONLY_EXTERNAL_REGISTRATION are true - disabling registration")
+ Service.DisableRegistration = true
+ }
+ if !sec.Key("REGISTER_EMAIL_CONFIRM").MustBool() {
+ Service.RegisterManualConfirm = sec.Key("REGISTER_MANUAL_CONFIRM").MustBool(false)
+ } else {
+ Service.RegisterManualConfirm = false
+ }
+ if sec.HasKey("EMAIL_DOMAIN_WHITELIST") {
+ deprecatedSetting(rootCfg, "service", "EMAIL_DOMAIN_WHITELIST", "service", "EMAIL_DOMAIN_ALLOWLIST", "1.21")
+ }
+ Service.EmailDomainAllowList = CompileEmailGlobList(sec, "EMAIL_DOMAIN_WHITELIST", "EMAIL_DOMAIN_ALLOWLIST")
+ Service.EmailDomainBlockList = CompileEmailGlobList(sec, "EMAIL_DOMAIN_BLOCKLIST")
+ Service.ShowRegistrationButton = sec.Key("SHOW_REGISTRATION_BUTTON").MustBool(!(Service.DisableRegistration || Service.AllowOnlyExternalRegistration))
+ Service.ShowMilestonesDashboardPage = sec.Key("SHOW_MILESTONES_DASHBOARD_PAGE").MustBool(true)
+ Service.RequireSignInView = sec.Key("REQUIRE_SIGNIN_VIEW").MustBool()
+ Service.EnableBasicAuth = sec.Key("ENABLE_BASIC_AUTHENTICATION").MustBool(true)
+ Service.EnableReverseProxyAuth = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION").MustBool()
+ Service.EnableReverseProxyAuthAPI = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION_API").MustBool()
+ Service.EnableReverseProxyAutoRegister = sec.Key("ENABLE_REVERSE_PROXY_AUTO_REGISTRATION").MustBool()
+ Service.EnableReverseProxyEmail = sec.Key("ENABLE_REVERSE_PROXY_EMAIL").MustBool()
+ Service.EnableReverseProxyFullName = sec.Key("ENABLE_REVERSE_PROXY_FULL_NAME").MustBool()
+ Service.EnableCaptcha = sec.Key("ENABLE_CAPTCHA").MustBool(false)
+ Service.RequireCaptchaForLogin = sec.Key("REQUIRE_CAPTCHA_FOR_LOGIN").MustBool(false)
+ Service.RequireExternalRegistrationCaptcha = sec.Key("REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA").MustBool(Service.EnableCaptcha)
+ Service.RequireExternalRegistrationPassword = sec.Key("REQUIRE_EXTERNAL_REGISTRATION_PASSWORD").MustBool()
+ Service.CaptchaType = sec.Key("CAPTCHA_TYPE").MustString(ImageCaptcha)
+ Service.RecaptchaSecret = sec.Key("RECAPTCHA_SECRET").MustString("")
+ Service.RecaptchaSitekey = sec.Key("RECAPTCHA_SITEKEY").MustString("")
+ Service.RecaptchaURL = sec.Key("RECAPTCHA_URL").MustString("https://www.google.com/recaptcha/")
+ Service.CfTurnstileSecret = sec.Key("CF_TURNSTILE_SECRET").MustString("")
+ Service.CfTurnstileSitekey = sec.Key("CF_TURNSTILE_SITEKEY").MustString("")
+ Service.HcaptchaSecret = sec.Key("HCAPTCHA_SECRET").MustString("")
+ Service.HcaptchaSitekey = sec.Key("HCAPTCHA_SITEKEY").MustString("")
+ Service.McaptchaURL = sec.Key("MCAPTCHA_URL").MustString("https://demo.mcaptcha.org/")
+ Service.McaptchaSecret = sec.Key("MCAPTCHA_SECRET").MustString("")
+ Service.McaptchaSitekey = sec.Key("MCAPTCHA_SITEKEY").MustString("")
+ Service.DefaultKeepEmailPrivate = sec.Key("DEFAULT_KEEP_EMAIL_PRIVATE").MustBool()
+ Service.DefaultAllowCreateOrganization = sec.Key("DEFAULT_ALLOW_CREATE_ORGANIZATION").MustBool(true)
+ Service.DefaultUserIsRestricted = sec.Key("DEFAULT_USER_IS_RESTRICTED").MustBool(false)
+ Service.AllowDotsInUsernames = sec.Key("ALLOW_DOTS_IN_USERNAMES").MustBool(true)
+ Service.EnableTimetracking = sec.Key("ENABLE_TIMETRACKING").MustBool(true)
+ if Service.EnableTimetracking {
+ Service.DefaultEnableTimetracking = sec.Key("DEFAULT_ENABLE_TIMETRACKING").MustBool(true)
+ }
+ Service.DefaultEnableDependencies = sec.Key("DEFAULT_ENABLE_DEPENDENCIES").MustBool(true)
+ Service.AllowCrossRepositoryDependencies = sec.Key("ALLOW_CROSS_REPOSITORY_DEPENDENCIES").MustBool(true)
+ Service.DefaultAllowOnlyContributorsToTrackTime = sec.Key("DEFAULT_ALLOW_ONLY_CONTRIBUTORS_TO_TRACK_TIME").MustBool(true)
+ Service.NoReplyAddress = sec.Key("NO_REPLY_ADDRESS").MustString("noreply." + Domain)
+ Service.UserLocationMapURL = sec.Key("USER_LOCATION_MAP_URL").MustString("https://www.openstreetmap.org/search?query=")
+ Service.EnableUserHeatmap = sec.Key("ENABLE_USER_HEATMAP").MustBool(true)
+ Service.AutoWatchNewRepos = sec.Key("AUTO_WATCH_NEW_REPOS").MustBool(true)
+ Service.AutoWatchOnChanges = sec.Key("AUTO_WATCH_ON_CHANGES").MustBool(false)
+ modes := sec.Key("ALLOWED_USER_VISIBILITY_MODES").Strings(",")
+ if len(modes) != 0 {
+ Service.AllowedUserVisibilityModes = []string{}
+ Service.AllowedUserVisibilityModesSlice = []bool{false, false, false}
+ for _, sMode := range modes {
+ if tp, ok := structs.VisibilityModes[sMode]; ok { // remove unsupported modes
+ Service.AllowedUserVisibilityModes = append(Service.AllowedUserVisibilityModes, sMode)
+ Service.AllowedUserVisibilityModesSlice[tp] = true
+ } else {
+ log.Warn("ALLOWED_USER_VISIBILITY_MODES %s is unsupported", sMode)
+ }
+ }
+ }
+
+ if len(Service.AllowedUserVisibilityModes) == 0 {
+ Service.AllowedUserVisibilityModes = []string{"public", "limited", "private"}
+ Service.AllowedUserVisibilityModesSlice = []bool{true, true, true}
+ }
+
+ Service.DefaultUserVisibility = sec.Key("DEFAULT_USER_VISIBILITY").String()
+ if Service.DefaultUserVisibility == "" {
+ Service.DefaultUserVisibility = Service.AllowedUserVisibilityModes[0]
+ } else if !Service.AllowedUserVisibilityModesSlice[structs.VisibilityModes[Service.DefaultUserVisibility]] {
+ log.Warn("DEFAULT_USER_VISIBILITY %s is wrong or not in ALLOWED_USER_VISIBILITY_MODES, using first allowed", Service.DefaultUserVisibility)
+ Service.DefaultUserVisibility = Service.AllowedUserVisibilityModes[0]
+ }
+ Service.DefaultUserVisibilityMode = structs.VisibilityModes[Service.DefaultUserVisibility]
+ Service.DefaultOrgVisibility = sec.Key("DEFAULT_ORG_VISIBILITY").In("public", structs.ExtractKeysFromMapString(structs.VisibilityModes))
+ Service.DefaultOrgVisibilityMode = structs.VisibilityModes[Service.DefaultOrgVisibility]
+ Service.DefaultOrgMemberVisible = sec.Key("DEFAULT_ORG_MEMBER_VISIBLE").MustBool()
+ Service.UserDeleteWithCommentsMaxTime = sec.Key("USER_DELETE_WITH_COMMENTS_MAX_TIME").MustDuration(0)
+ sec.Key("VALID_SITE_URL_SCHEMES").MustString("http,https")
+ Service.ValidSiteURLSchemes = sec.Key("VALID_SITE_URL_SCHEMES").Strings(",")
+ schemes := make([]string, 0, len(Service.ValidSiteURLSchemes))
+ for _, scheme := range Service.ValidSiteURLSchemes {
+ scheme = strings.ToLower(strings.TrimSpace(scheme))
+ if scheme != "" {
+ schemes = append(schemes, scheme)
+ }
+ }
+ Service.ValidSiteURLSchemes = schemes
+
+ mustMapSetting(rootCfg, "service.explore", &Service.Explore)
+
+ loadOpenIDSetting(rootCfg)
+}
+
+func loadOpenIDSetting(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("openid")
+ Service.EnableOpenIDSignIn = sec.Key("ENABLE_OPENID_SIGNIN").MustBool(!InstallLock)
+ Service.EnableOpenIDSignUp = sec.Key("ENABLE_OPENID_SIGNUP").MustBool(!Service.DisableRegistration && Service.EnableOpenIDSignIn)
+ pats := sec.Key("WHITELISTED_URIS").Strings(" ")
+ if len(pats) != 0 {
+ Service.OpenIDWhitelist = make([]*regexp.Regexp, len(pats))
+ for i, p := range pats {
+ Service.OpenIDWhitelist[i] = regexp.MustCompilePOSIX(p)
+ }
+ }
+ pats = sec.Key("BLACKLISTED_URIS").Strings(" ")
+ if len(pats) != 0 {
+ Service.OpenIDBlacklist = make([]*regexp.Regexp, len(pats))
+ for i, p := range pats {
+ Service.OpenIDBlacklist[i] = regexp.MustCompilePOSIX(p)
+ }
+ }
+}
diff --git a/modules/setting/service_test.go b/modules/setting/service_test.go
new file mode 100644
index 0000000..7a13e39
--- /dev/null
+++ b/modules/setting/service_test.go
@@ -0,0 +1,133 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/structs"
+
+ "github.com/gobwas/glob"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLoadServices(t *testing.T) {
+ oldService := Service
+ defer func() {
+ Service = oldService
+ }()
+
+ cfg, err := NewConfigProviderFromData(`
+[service]
+EMAIL_DOMAIN_WHITELIST = d1, *.w
+EMAIL_DOMAIN_ALLOWLIST = d2, *.a
+EMAIL_DOMAIN_BLOCKLIST = d3, *.b
+`)
+ require.NoError(t, err)
+ loadServiceFrom(cfg)
+
+ match := func(globs []glob.Glob, s string) bool {
+ for _, g := range globs {
+ if g.Match(s) {
+ return true
+ }
+ }
+ return false
+ }
+
+ assert.True(t, match(Service.EmailDomainAllowList, "d1"))
+ assert.True(t, match(Service.EmailDomainAllowList, "foo.w"))
+ assert.True(t, match(Service.EmailDomainAllowList, "d2"))
+ assert.True(t, match(Service.EmailDomainAllowList, "foo.a"))
+ assert.False(t, match(Service.EmailDomainAllowList, "d3"))
+
+ assert.True(t, match(Service.EmailDomainBlockList, "d3"))
+ assert.True(t, match(Service.EmailDomainBlockList, "foo.b"))
+ assert.False(t, match(Service.EmailDomainBlockList, "d1"))
+}
+
+func TestLoadServiceVisibilityModes(t *testing.T) {
+ oldService := Service
+ defer func() {
+ Service = oldService
+ }()
+
+ kases := map[string]func(){
+ `
+[service]
+DEFAULT_USER_VISIBILITY = public
+ALLOWED_USER_VISIBILITY_MODES = public,limited,private
+`: func() {
+ assert.Equal(t, "public", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypePublic, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"public", "limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+ [service]
+ DEFAULT_USER_VISIBILITY = public
+ `: func() {
+ assert.Equal(t, "public", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypePublic, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"public", "limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+ [service]
+ DEFAULT_USER_VISIBILITY = limited
+ `: func() {
+ assert.Equal(t, "limited", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypeLimited, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"public", "limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+[service]
+ALLOWED_USER_VISIBILITY_MODES = public,limited,private
+`: func() {
+ assert.Equal(t, "public", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypePublic, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"public", "limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+[service]
+DEFAULT_USER_VISIBILITY = public
+ALLOWED_USER_VISIBILITY_MODES = limited,private
+`: func() {
+ assert.Equal(t, "limited", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypeLimited, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+[service]
+DEFAULT_USER_VISIBILITY = my_type
+ALLOWED_USER_VISIBILITY_MODES = limited,private
+`: func() {
+ assert.Equal(t, "limited", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypeLimited, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"limited", "private"}, Service.AllowedUserVisibilityModes)
+ },
+ `
+[service]
+DEFAULT_USER_VISIBILITY = public
+ALLOWED_USER_VISIBILITY_MODES = public, limit, privated
+`: func() {
+ assert.Equal(t, "public", Service.DefaultUserVisibility)
+ assert.Equal(t, structs.VisibleTypePublic, Service.DefaultUserVisibilityMode)
+ assert.Equal(t, []string{"public"}, Service.AllowedUserVisibilityModes)
+ },
+ }
+
+ for kase, fun := range kases {
+ t.Run(kase, func(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(kase)
+ require.NoError(t, err)
+ loadServiceFrom(cfg)
+ fun()
+ // reset
+ Service.AllowedUserVisibilityModesSlice = []bool{true, true, true}
+ Service.AllowedUserVisibilityModes = []string{}
+ Service.DefaultUserVisibility = ""
+ Service.DefaultUserVisibilityMode = structs.VisibleTypePublic
+ })
+ }
+}
diff --git a/modules/setting/session.go b/modules/setting/session.go
new file mode 100644
index 0000000..e9637fd
--- /dev/null
+++ b/modules/setting/session.go
@@ -0,0 +1,78 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/http"
+ "path"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// SessionConfig defines Session settings
+var SessionConfig = struct {
+ OriginalProvider string
+ Provider string
+ // Provider configuration, specific to the chosen provider.
+ ProviderConfig string
+ // Cookie name to save the session ID. Default is "i_like_gitea".
+ CookieName string
+ // Cookie path to store. Default is "/".
+ CookiePath string
+ // GC interval time in seconds. Default is 86400.
+ Gclifetime int64
+ // Max life time in seconds. Default is 86400, the same as the GC interval.
+ Maxlifetime int64
+ // Use HTTPS only. Default is false.
+ Secure bool
+ // Cookie domain name. Default is empty.
+ Domain string
+ // SameSite declares if your cookie should be restricted to a first-party or same-site context. Valid strings are "none", "lax", "strict". Default is "lax"
+ SameSite http.SameSite
+}{
+ CookieName: "i_like_gitea",
+ Gclifetime: 86400,
+ Maxlifetime: 86400,
+ SameSite: http.SameSiteLaxMode,
+}
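+
+// An illustrative [session] section overriding some of these defaults (values are
+// examples only):
+//
+//	[session]
+//	PROVIDER          = file
+//	PROVIDER_CONFIG   = data/sessions
+//	COOKIE_SECURE     = true
+//	SESSION_LIFE_TIME = 604800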
+
+func loadSessionFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("session")
+ SessionConfig.Provider = sec.Key("PROVIDER").In("memory",
+ []string{"memory", "file", "redis", "mysql", "postgres", "couchbase", "memcache", "db"})
+ SessionConfig.ProviderConfig = strings.Trim(sec.Key("PROVIDER_CONFIG").MustString(path.Join(AppDataPath, "sessions")), "\" ")
+ if SessionConfig.Provider == "file" && !filepath.IsAbs(SessionConfig.ProviderConfig) {
+ SessionConfig.ProviderConfig = path.Join(AppWorkPath, SessionConfig.ProviderConfig)
+ }
+ SessionConfig.CookieName = sec.Key("COOKIE_NAME").MustString("i_like_gitea")
+ SessionConfig.CookiePath = AppSubURL
+ if SessionConfig.CookiePath == "" {
+ SessionConfig.CookiePath = "/"
+ }
+ SessionConfig.Secure = sec.Key("COOKIE_SECURE").MustBool(strings.HasPrefix(strings.ToLower(AppURL), "https://"))
+ SessionConfig.Gclifetime = sec.Key("GC_INTERVAL_TIME").MustInt64(86400)
+ SessionConfig.Maxlifetime = sec.Key("SESSION_LIFE_TIME").MustInt64(86400)
+ SessionConfig.Domain = sec.Key("DOMAIN").String()
+ samesiteString := sec.Key("SAME_SITE").In("lax", []string{"none", "lax", "strict"})
+ switch strings.ToLower(samesiteString) {
+ case "none":
+ SessionConfig.SameSite = http.SameSiteNoneMode
+ case "strict":
+ SessionConfig.SameSite = http.SameSiteStrictMode
+ default:
+ SessionConfig.SameSite = http.SameSiteLaxMode
+ }
+ shadowConfig, err := json.Marshal(SessionConfig)
+ if err != nil {
+ log.Fatal("Can't shadow session config: %v", err)
+ }
+ SessionConfig.ProviderConfig = string(shadowConfig)
+ SessionConfig.OriginalProvider = SessionConfig.Provider
+ SessionConfig.Provider = "VirtualSession"
+
+ log.Info("Session Service Enabled")
+}
diff --git a/modules/setting/setting.go b/modules/setting/setting.go
new file mode 100644
index 0000000..c9d3083
--- /dev/null
+++ b/modules/setting/setting.go
@@ -0,0 +1,238 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "fmt"
+ "os"
+ "runtime"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/user"
+ "code.gitea.io/gitea/modules/util"
+)
+
+var ForgejoVersion = "1.0.0"
+
+// settings
+var (
+ // AppVer is the version of the current build of Gitea. It is set in main.go from main.Version.
+ AppVer string
+ // AppBuiltWith represents a human-readable string of the Go runtime build version and build tags. (See main.go formatBuiltWith().)
+ AppBuiltWith string
+ // AppStartTime stores the time Gitea was started
+ AppStartTime time.Time
+
+ // Other global setting objects
+
+ CfgProvider ConfigProvider
+ RunMode string
+ RunUser string
+ IsProd bool
+ IsWindows bool
+
+ // IsInTesting indicates whether tests are running. A lot of unreliable code causes a lot of nonsense error logs during testing.
+ // TODO: this is only a temporary solution, we should make the test code more reliable
+ IsInTesting = false
+)
+
+func init() {
+ IsWindows = runtime.GOOS == "windows"
+ if AppVer == "" {
+ AppVer = "dev"
+ }
+
+ // We can rely on log.CanColorStdout being set properly because modules/log/console_windows.go comes before modules/setting/setting.go lexicographically
+ // By default set this logger at Info - we'll change it later, but we need to start with something.
+ log.SetConsoleLogger(log.DEFAULT, "console", log.INFO)
+}
+
+// IsRunUserMatchCurrentUser returns false if the configured run user does not match
+// the actual user that runs the app. The first return value is the actual user name.
+// This check is ignored under Windows since SSH remote login is not the main
+// login method on Windows.
+func IsRunUserMatchCurrentUser(runUser string) (string, bool) {
+ if IsWindows || SSH.StartBuiltinServer {
+ return "", true
+ }
+
+ currentUser := user.CurrentUsername()
+ return currentUser, runUser == currentUser
+}
+
+// PrepareAppDataPath creates app data directory if necessary
+func PrepareAppDataPath() error {
+ // FIXME: There are too many calls to MkdirAll in old code. It is incorrect.
+ // For example, if someDir=/mnt/vol1/gitea-home/data and the mount point /mnt/vol1 is not mounted when Forgejo runs,
+ // then Forgejo will make new empty directories in /mnt/vol1, all of which end up on the root filesystem.
+ // The correct behavior should be: creating parent directories is the end user's duty. We only create sub-directories in existing parent directories.
+ // For quickstart, the parent directories should be created automatically on first startup (eg: a flag or a check of INSTALL_LOCK).
+ // Now we can take the first step to do it correctly (using Mkdir) in other packages, prepare the AppDataPath here, and refactor later.
+
+ st, err := os.Stat(AppDataPath)
+ if os.IsNotExist(err) {
+ err = os.MkdirAll(AppDataPath, os.ModePerm)
+ if err != nil {
+ return fmt.Errorf("unable to create the APP_DATA_PATH directory: %q, Error: %w", AppDataPath, err)
+ }
+ return nil
+ }
+
+ if err != nil {
+ return fmt.Errorf("unable to use APP_DATA_PATH %q. Error: %w", AppDataPath, err)
+ }
+
+ if !st.IsDir() /* also works for symlink */ {
+ return fmt.Errorf("the APP_DATA_PATH %q is not a directory (or symlink to a directory) and can't be used", AppDataPath)
+ }
+
+ return nil
+}
+
+func InitCfgProvider(file string) {
+ var err error
+ if CfgProvider, err = NewConfigProviderFromFile(file); err != nil {
+ log.Fatal("Unable to init config provider from %q: %v", file, err)
+ }
+ CfgProvider.DisableSaving() // do not allow saving the CfgProvider into file, it will be polluted by the "MustXxx" calls
+}
+
+func MustInstalled() {
+ if !InstallLock {
+ log.Fatal(`Unable to load config file for an installed Forgejo instance, you should either use "--config" to set your config file (app.ini), or run the "forgejo web" command to install Forgejo.`)
+ }
+}
+
+func LoadCommonSettings() {
+ if err := loadCommonSettingsFrom(CfgProvider); err != nil {
+ log.Fatal("Unable to load settings from config: %v", err)
+ }
+}
+
+// loadCommonSettingsFrom loads common configurations from a configuration provider.
+func loadCommonSettingsFrom(cfg ConfigProvider) error {
+ // WARNING: don't change the sequence unless you know what you are doing.
+ loadRunModeFrom(cfg)
+ loadLogGlobalFrom(cfg)
+ loadServerFrom(cfg)
+ loadSSHFrom(cfg)
+
+ mustCurrentRunUserMatch(cfg) // it depends on the SSH config, only non-builtin SSH server requires this check
+
+ loadOAuth2From(cfg)
+ loadSecurityFrom(cfg)
+ if err := loadAttachmentFrom(cfg); err != nil {
+ return err
+ }
+ if err := loadLFSFrom(cfg); err != nil {
+ return err
+ }
+ loadTimeFrom(cfg)
+ loadRepositoryFrom(cfg)
+ if err := loadAvatarsFrom(cfg); err != nil {
+ return err
+ }
+ if err := loadRepoAvatarFrom(cfg); err != nil {
+ return err
+ }
+ if err := loadPackagesFrom(cfg); err != nil {
+ return err
+ }
+ if err := loadActionsFrom(cfg); err != nil {
+ return err
+ }
+ loadUIFrom(cfg)
+ loadAdminFrom(cfg)
+ loadAPIFrom(cfg)
+ loadBadgesFrom(cfg)
+ loadMetricsFrom(cfg)
+ loadCamoFrom(cfg)
+ loadI18nFrom(cfg)
+ loadGitFrom(cfg)
+ loadMirrorFrom(cfg)
+ loadMarkupFrom(cfg)
+ loadQuotaFrom(cfg)
+ loadOtherFrom(cfg)
+ return nil
+}
+
+func loadRunModeFrom(rootCfg ConfigProvider) {
+ rootSec := rootCfg.Section("")
+ RunUser = rootSec.Key("RUN_USER").MustString(user.CurrentUsername())
+
+ // The following is a purposefully undocumented option. Please do not run Forgejo as root. It will only cause future headaches.
+ // Please don't use root as a bandaid to "fix" something that is broken; the broken thing should be fixed properly instead.
+ unsafeAllowRunAsRoot := ConfigSectionKeyBool(rootSec, "I_AM_BEING_UNSAFE_RUNNING_AS_ROOT")
+ unsafeAllowRunAsRoot = unsafeAllowRunAsRoot || util.OptionalBoolParse(os.Getenv("GITEA_I_AM_BEING_UNSAFE_RUNNING_AS_ROOT")).Value()
+ RunMode = os.Getenv("GITEA_RUN_MODE")
+ if RunMode == "" {
+ RunMode = rootSec.Key("RUN_MODE").MustString("prod")
+ }
+
+ // non-dev mode is treated as prod mode, to protect users from accidentally running in dev mode if there is a typo in this value.
+ RunMode = strings.ToLower(RunMode)
+ if RunMode != "dev" {
+ RunMode = "prod"
+ }
+ IsProd = RunMode != "dev"
+
+ // check if we run as root
+ if os.Getuid() == 0 {
+ if !unsafeAllowRunAsRoot {
+ // Special thanks to VLC which inspired the wording of this messaging.
+ log.Fatal("Forgejo is not supposed to be run as root. Sorry. If you need to use privileged TCP ports please instead use setcap and the `cap_net_bind_service` permission")
+ }
+ log.Critical("You are running Forgejo using the root user, and have purposely chosen to skip built-in protections around this. You have been warned against this.")
+ }
+}
+
+// HasInstallLock checks the install-lock in ConfigProvider directly, because sometimes the config file is not loaded into setting variables yet.
+func HasInstallLock(rootCfg ConfigProvider) bool {
+ return rootCfg.Section("security").Key("INSTALL_LOCK").MustBool(false)
+}
+
+func mustCurrentRunUserMatch(rootCfg ConfigProvider) {
+ // Does not check run user when the "InstallLock" is off.
+ if HasInstallLock(rootCfg) {
+ currentUser, match := IsRunUserMatchCurrentUser(RunUser)
+ if !match {
+ log.Fatal("Expect user '%s' but current user is: %s", RunUser, currentUser)
+ }
+ }
+}
+
+// LoadSettings initializes the settings for normal start up
+func LoadSettings() {
+ initAllLoggers()
+
+ loadDBSetting(CfgProvider)
+ loadServiceFrom(CfgProvider)
+ loadOAuth2ClientFrom(CfgProvider)
+ loadCacheFrom(CfgProvider)
+ loadSessionFrom(CfgProvider)
+ loadCorsFrom(CfgProvider)
+ loadMailsFrom(CfgProvider)
+ loadProxyFrom(CfgProvider)
+ loadWebhookFrom(CfgProvider)
+ loadMigrationsFrom(CfgProvider)
+ loadIndexerFrom(CfgProvider)
+ loadTaskFrom(CfgProvider)
+ LoadQueueSettings()
+ loadProjectFrom(CfgProvider)
+ loadMimeTypeMapFrom(CfgProvider)
+ loadFederationFrom(CfgProvider)
+ loadF3From(CfgProvider)
+}
+
+// LoadSettingsForInstall initializes the settings for install
+func LoadSettingsForInstall() {
+ initAllLoggers()
+
+ loadDBSetting(CfgProvider)
+ loadServiceFrom(CfgProvider)
+ loadMailerFrom(CfgProvider)
+}
diff --git a/modules/setting/setting_test.go b/modules/setting/setting_test.go
new file mode 100644
index 0000000..f77ee65
--- /dev/null
+++ b/modules/setting/setting_test.go
@@ -0,0 +1,32 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "testing"
+
+ "code.gitea.io/gitea/modules/json"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMakeAbsoluteAssetURL(t *testing.T) {
+ assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234", "https://localhost:2345"))
+ assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234/", "https://localhost:2345"))
+ assert.Equal(t, "https://localhost:2345", MakeAbsoluteAssetURL("https://localhost:1234/", "https://localhost:2345/"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234", "/foo"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/", "/foo"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/", "/foo/"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo", "/foo"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/foo"))
+ assert.Equal(t, "https://localhost:1234/foo", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/foo/"))
+ assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo", "/bar"))
+ assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/bar"))
+ assert.Equal(t, "https://localhost:1234/bar", MakeAbsoluteAssetURL("https://localhost:1234/foo/", "/bar/"))
+}
+
+func TestMakeManifestData(t *testing.T) {
+ jsonBytes := MakeManifestData(`Example App '\"`, "https://example.com", "https://example.com/foo/bar")
+ assert.True(t, json.Valid(jsonBytes))
+}
diff --git a/modules/setting/ssh.go b/modules/setting/ssh.go
new file mode 100644
index 0000000..ea387e5
--- /dev/null
+++ b/modules/setting/ssh.go
@@ -0,0 +1,197 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "text/template"
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/util"
+
+ gossh "golang.org/x/crypto/ssh"
+)
+
+var SSH = struct {
+ Disabled bool `ini:"DISABLE_SSH"`
+ StartBuiltinServer bool `ini:"START_SSH_SERVER"`
+ BuiltinServerUser string `ini:"BUILTIN_SSH_SERVER_USER"`
+ UseProxyProtocol bool `ini:"SSH_SERVER_USE_PROXY_PROTOCOL"`
+ Domain string `ini:"SSH_DOMAIN"`
+ Port int `ini:"SSH_PORT"`
+ User string `ini:"SSH_USER"`
+ ListenHost string `ini:"SSH_LISTEN_HOST"`
+ ListenPort int `ini:"SSH_LISTEN_PORT"`
+ RootPath string `ini:"SSH_ROOT_PATH"`
+ ServerCiphers []string `ini:"SSH_SERVER_CIPHERS"`
+ ServerKeyExchanges []string `ini:"SSH_SERVER_KEY_EXCHANGES"`
+ ServerMACs []string `ini:"SSH_SERVER_MACS"`
+ ServerHostKeys []string `ini:"SSH_SERVER_HOST_KEYS"`
+ KeyTestPath string `ini:"SSH_KEY_TEST_PATH"`
+ KeygenPath string `ini:"SSH_KEYGEN_PATH"`
+ AuthorizedKeysBackup bool `ini:"SSH_AUTHORIZED_KEYS_BACKUP"`
+ AuthorizedPrincipalsBackup bool `ini:"SSH_AUTHORIZED_PRINCIPALS_BACKUP"`
+ AuthorizedKeysCommandTemplate string `ini:"SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE"`
+ AuthorizedKeysCommandTemplateTemplate *template.Template `ini:"-"`
+ MinimumKeySizeCheck bool `ini:"-"`
+ MinimumKeySizes map[string]int `ini:"-"`
+ CreateAuthorizedKeysFile bool `ini:"SSH_CREATE_AUTHORIZED_KEYS_FILE"`
+ CreateAuthorizedPrincipalsFile bool `ini:"SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE"`
+ ExposeAnonymous bool `ini:"SSH_EXPOSE_ANONYMOUS"`
+ AuthorizedPrincipalsAllow []string `ini:"SSH_AUTHORIZED_PRINCIPALS_ALLOW"`
+ AuthorizedPrincipalsEnabled bool `ini:"-"`
+ TrustedUserCAKeys []string `ini:"SSH_TRUSTED_USER_CA_KEYS"`
+ TrustedUserCAKeysFile string `ini:"SSH_TRUSTED_USER_CA_KEYS_FILENAME"`
+ TrustedUserCAKeysParsed []gossh.PublicKey `ini:"-"`
+ PerWriteTimeout time.Duration `ini:"SSH_PER_WRITE_TIMEOUT"`
+ PerWritePerKbTimeout time.Duration `ini:"SSH_PER_WRITE_PER_KB_TIMEOUT"`
+}{
+ Disabled: false,
+ StartBuiltinServer: false,
+ Domain: "",
+ Port: 22,
+ ServerCiphers: []string{"chacha20-poly1305@openssh.com", "aes128-ctr", "aes192-ctr", "aes256-ctr", "aes128-gcm@openssh.com", "aes256-gcm@openssh.com"},
+ ServerKeyExchanges: []string{"curve25519-sha256", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "diffie-hellman-group14-sha256", "diffie-hellman-group14-sha1"},
+ ServerMACs: []string{"hmac-sha2-256-etm@openssh.com", "hmac-sha2-256", "hmac-sha1"},
+ KeygenPath: "",
+ MinimumKeySizeCheck: true,
+ MinimumKeySizes: map[string]int{"ed25519": 256, "ed25519-sk": 256, "ecdsa": 256, "ecdsa-sk": 256, "rsa": 3071},
+ ServerHostKeys: []string{"ssh/gitea.rsa", "ssh/gogs.rsa"},
+ AuthorizedKeysCommandTemplate: "{{.AppPath}} --config={{.CustomConf}} serv key-{{.Key.ID}}",
+ PerWriteTimeout: PerWriteTimeout,
+ PerWritePerKbTimeout: PerWritePerKbTimeout,
+}
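+
+// An illustrative [server] snippet overriding a few of the SSH defaults above
+// (values are examples only):
+//
+//	[server]
+//	SSH_DOMAIN       = git.example.com
+//	SSH_PORT         = 2222
+//	SSH_LISTEN_PORT  = 2222
+//	START_SSH_SERVER = true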
+
+func parseAuthorizedPrincipalsAllow(values []string) ([]string, bool) {
+ anything := false
+ email := false
+ username := false
+ for _, value := range values {
+ v := strings.ToLower(strings.TrimSpace(value))
+ switch v {
+ case "off":
+ return []string{"off"}, false
+ case "email":
+ email = true
+ case "username":
+ username = true
+ case "anything":
+ anything = true
+ }
+ }
+ if anything {
+ return []string{"anything"}, true
+ }
+
+ authorizedPrincipalsAllow := []string{}
+ if username {
+ authorizedPrincipalsAllow = append(authorizedPrincipalsAllow, "username")
+ }
+ if email {
+ authorizedPrincipalsAllow = append(authorizedPrincipalsAllow, "email")
+ }
+
+ return authorizedPrincipalsAllow, true
+}
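+
+// For example, the input []string{"username", "email"} yields
+// ([]string{"username", "email"}, true); "off" short-circuits to
+// ([]string{"off"}, false) as soon as it is seen; and "anything" (without a
+// preceding "off") yields ([]string{"anything"}, true).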
+
+func loadSSHFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("server")
+ if len(SSH.Domain) == 0 {
+ SSH.Domain = Domain
+ }
+
+ homeDir, err := util.HomeDir()
+ if err != nil {
+ log.Fatal("Failed to get home directory: %v", err)
+ }
+ homeDir = strings.ReplaceAll(homeDir, "\\", "/")
+
+ SSH.RootPath = path.Join(homeDir, ".ssh")
+ serverCiphers := sec.Key("SSH_SERVER_CIPHERS").Strings(",")
+ if len(serverCiphers) > 0 {
+ SSH.ServerCiphers = serverCiphers
+ }
+ serverKeyExchanges := sec.Key("SSH_SERVER_KEY_EXCHANGES").Strings(",")
+ if len(serverKeyExchanges) > 0 {
+ SSH.ServerKeyExchanges = serverKeyExchanges
+ }
+ serverMACs := sec.Key("SSH_SERVER_MACS").Strings(",")
+ if len(serverMACs) > 0 {
+ SSH.ServerMACs = serverMACs
+ }
+ SSH.KeyTestPath = os.TempDir()
+ if err = sec.MapTo(&SSH); err != nil {
+ log.Fatal("Failed to map SSH settings: %v", err)
+ }
+ for i, key := range SSH.ServerHostKeys {
+ if !filepath.IsAbs(key) {
+ SSH.ServerHostKeys[i] = filepath.Join(AppDataPath, key)
+ }
+ }
+
+ SSH.KeygenPath = sec.Key("SSH_KEYGEN_PATH").String()
+ SSH.Port = sec.Key("SSH_PORT").MustInt(22)
+ SSH.ListenPort = sec.Key("SSH_LISTEN_PORT").MustInt(SSH.Port)
+ SSH.UseProxyProtocol = sec.Key("SSH_SERVER_USE_PROXY_PROTOCOL").MustBool(false)
+
+ // When SSH is disabled, the start builtin server value is ignored.
+ if SSH.Disabled {
+ SSH.StartBuiltinServer = false
+ }
+
+ SSH.TrustedUserCAKeysFile = sec.Key("SSH_TRUSTED_USER_CA_KEYS_FILENAME").MustString(filepath.Join(SSH.RootPath, "gitea-trusted-user-ca-keys.pem"))
+
+ for _, caKey := range SSH.TrustedUserCAKeys {
+ pubKey, _, _, _, err := gossh.ParseAuthorizedKey([]byte(caKey))
+ if err != nil {
+ log.Fatal("Failed to parse TrustedUserCaKeys: %s %v", caKey, err)
+ }
+
+ SSH.TrustedUserCAKeysParsed = append(SSH.TrustedUserCAKeysParsed, pubKey)
+ }
+ if len(SSH.TrustedUserCAKeys) > 0 {
+ // Set the default to username,email; otherwise we can leave it empty
+ sec.Key("SSH_AUTHORIZED_PRINCIPALS_ALLOW").MustString("username,email")
+ } else {
+ sec.Key("SSH_AUTHORIZED_PRINCIPALS_ALLOW").MustString("off")
+ }
+
+ SSH.AuthorizedPrincipalsAllow, SSH.AuthorizedPrincipalsEnabled = parseAuthorizedPrincipalsAllow(sec.Key("SSH_AUTHORIZED_PRINCIPALS_ALLOW").Strings(","))
+
+ SSH.MinimumKeySizeCheck = sec.Key("MINIMUM_KEY_SIZE_CHECK").MustBool(SSH.MinimumKeySizeCheck)
+ minimumKeySizes := rootCfg.Section("ssh.minimum_key_sizes").Keys()
+ for _, key := range minimumKeySizes {
+ if key.MustInt() != -1 {
+ SSH.MinimumKeySizes[strings.ToLower(key.Name())] = key.MustInt()
+ } else {
+ delete(SSH.MinimumKeySizes, strings.ToLower(key.Name()))
+ }
+ }
+
+ SSH.AuthorizedKeysBackup = sec.Key("SSH_AUTHORIZED_KEYS_BACKUP").MustBool(false)
+ SSH.CreateAuthorizedKeysFile = sec.Key("SSH_CREATE_AUTHORIZED_KEYS_FILE").MustBool(true)
+
+ SSH.AuthorizedPrincipalsBackup = false
+ SSH.CreateAuthorizedPrincipalsFile = false
+ if SSH.AuthorizedPrincipalsEnabled {
+ SSH.AuthorizedPrincipalsBackup = sec.Key("SSH_AUTHORIZED_PRINCIPALS_BACKUP").MustBool(true)
+ SSH.CreateAuthorizedPrincipalsFile = sec.Key("SSH_CREATE_AUTHORIZED_PRINCIPALS_FILE").MustBool(true)
+ }
+
+ SSH.ExposeAnonymous = sec.Key("SSH_EXPOSE_ANONYMOUS").MustBool(false)
+ SSH.AuthorizedKeysCommandTemplate = sec.Key("SSH_AUTHORIZED_KEYS_COMMAND_TEMPLATE").MustString(SSH.AuthorizedKeysCommandTemplate)
+
+ SSH.AuthorizedKeysCommandTemplateTemplate = template.Must(template.New("").Parse(SSH.AuthorizedKeysCommandTemplate))
+
+ SSH.PerWriteTimeout = sec.Key("SSH_PER_WRITE_TIMEOUT").MustDuration(PerWriteTimeout)
+ SSH.PerWritePerKbTimeout = sec.Key("SSH_PER_WRITE_PER_KB_TIMEOUT").MustDuration(PerWritePerKbTimeout)
+
+ // ensure loadRunModeFrom has been executed before this
+ SSH.BuiltinServerUser = rootCfg.Section("server").Key("BUILTIN_SSH_SERVER_USER").MustString(RunUser)
+ SSH.User = rootCfg.Section("server").Key("SSH_USER").MustString(SSH.BuiltinServerUser)
+}
diff --git a/modules/setting/storage.go b/modules/setting/storage.go
new file mode 100644
index 0000000..8ee5c0f
--- /dev/null
+++ b/modules/setting/storage.go
@@ -0,0 +1,275 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "errors"
+ "fmt"
+ "path/filepath"
+ "strings"
+)
+
+// StorageType is a type of Storage
+type StorageType string
+
+const (
+ // LocalStorageType is the type descriptor for local storage
+ LocalStorageType StorageType = "local"
+ // MinioStorageType is the type descriptor for minio storage
+ MinioStorageType StorageType = "minio"
+)
+
+var storageTypes = []StorageType{
+ LocalStorageType,
+ MinioStorageType,
+}
+
+// IsValidStorageType returns true if the given storage type is valid
+func IsValidStorageType(storageType StorageType) bool {
+ for _, t := range storageTypes {
+ if t == storageType {
+ return true
+ }
+ }
+ return false
+}
+
+// MinioStorageConfig represents the configuration for a minio storage
+type MinioStorageConfig struct {
+ Endpoint string `ini:"MINIO_ENDPOINT" json:",omitempty"`
+ AccessKeyID string `ini:"MINIO_ACCESS_KEY_ID" json:",omitempty"`
+ SecretAccessKey string `ini:"MINIO_SECRET_ACCESS_KEY" json:",omitempty"`
+ Bucket string `ini:"MINIO_BUCKET" json:",omitempty"`
+ BucketLookup string `ini:"MINIO_BUCKET_LOOKUP" json:",omitempty"`
+ Location string `ini:"MINIO_LOCATION" json:",omitempty"`
+ BasePath string `ini:"MINIO_BASE_PATH" json:",omitempty"`
+ UseSSL bool `ini:"MINIO_USE_SSL"`
+ InsecureSkipVerify bool `ini:"MINIO_INSECURE_SKIP_VERIFY"`
+ ChecksumAlgorithm string `ini:"MINIO_CHECKSUM_ALGORITHM" json:",omitempty"`
+ ServeDirect bool `ini:"SERVE_DIRECT"`
+}
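+
+// An illustrative minio-backed [storage] section mapping onto these fields
+// (values are examples only):
+//
+//	[storage]
+//	STORAGE_TYPE   = minio
+//	MINIO_ENDPOINT = minio.example.com:9000
+//	MINIO_BUCKET   = forgejo
+//	MINIO_USE_SSL  = true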
+
+// Storage represents configuration of storages
+type Storage struct {
+ Type StorageType // local or minio
+ Path string `json:",omitempty"` // for local type
+ TemporaryPath string `json:",omitempty"`
+ MinioConfig MinioStorageConfig // for minio type
+}
+
+func (storage *Storage) ToShadowCopy() Storage {
+ shadowStorage := *storage
+ if shadowStorage.MinioConfig.AccessKeyID != "" {
+ shadowStorage.MinioConfig.AccessKeyID = "******"
+ }
+ if shadowStorage.MinioConfig.SecretAccessKey != "" {
+ shadowStorage.MinioConfig.SecretAccessKey = "******"
+ }
+ return shadowStorage
+}
+
+const storageSectionName = "storage"
+
+func getDefaultStorageSection(rootCfg ConfigProvider) ConfigSection {
+ storageSec := rootCfg.Section(storageSectionName)
+ // Global Defaults
+ storageSec.Key("STORAGE_TYPE").MustString("local")
+ storageSec.Key("MINIO_ENDPOINT").MustString("localhost:9000")
+ storageSec.Key("MINIO_ACCESS_KEY_ID").MustString("")
+ storageSec.Key("MINIO_SECRET_ACCESS_KEY").MustString("")
+ storageSec.Key("MINIO_BUCKET").MustString("gitea")
+ storageSec.Key("MINIO_BUCKET_LOOKUP").MustString("auto")
+ storageSec.Key("MINIO_LOCATION").MustString("us-east-1")
+ storageSec.Key("MINIO_USE_SSL").MustBool(false)
+ storageSec.Key("MINIO_INSECURE_SKIP_VERIFY").MustBool(false)
+ storageSec.Key("MINIO_CHECKSUM_ALGORITHM").MustString("default")
+ return storageSec
+}
+
+// getStorage finds the target section and the extra override section first, and then reads
+// override items from the override section
+func getStorage(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (*Storage, error) {
+ if name == "" {
+ return nil, errors.New("no name for storage")
+ }
+
+ targetSec, tp, err := getStorageTargetSection(rootCfg, name, typ, sec)
+ if err != nil {
+ return nil, err
+ }
+
+ overrideSec := getStorageOverrideSection(rootCfg, sec, tp, name)
+
+ targetType := targetSec.Key("STORAGE_TYPE").String()
+ switch targetType {
+ case string(LocalStorageType):
+ return getStorageForLocal(targetSec, overrideSec, tp, name)
+ case string(MinioStorageType):
+ return getStorageForMinio(targetSec, overrideSec, tp, name)
+ default:
+ return nil, fmt.Errorf("unsupported storage type %q", targetType)
+ }
+}
+
+type targetSecType int
+
+const (
+ targetSecIsTyp targetSecType = iota // target section is [storage.<type>] where the type comes from the parameter
+ targetSecIsStorage // target section is [storage]
+ targetSecIsDefault // target section is the default value
+ targetSecIsStorageWithName // target section is [storage.name]
+ targetSecIsSec // target section is the named section [name] itself
+)
+
+func getStorageSectionByType(rootCfg ConfigProvider, typ string) (ConfigSection, targetSecType, error) { //nolint:unparam
+ targetSec, err := rootCfg.GetSection(storageSectionName + "." + typ)
+ if err != nil {
+ if !IsValidStorageType(StorageType(typ)) {
+ return nil, 0, fmt.Errorf("get section via storage type %q failed: %v", typ, err)
+ }
+ // if typ is a valid storage type, but there is no [storage.local] or [storage.minio] section
+ // it's not an error
+ return nil, 0, nil
+ }
+
+ targetType := targetSec.Key("STORAGE_TYPE").String()
+ if targetType == "" {
+ if !IsValidStorageType(StorageType(typ)) {
+ return nil, 0, fmt.Errorf("unknow storage type %q", typ)
+ }
+ targetSec.Key("STORAGE_TYPE").SetValue(typ)
+ } else if !IsValidStorageType(StorageType(targetType)) {
+ return nil, 0, fmt.Errorf("unknow storage type %q for section storage.%v", targetType, typ)
+ }
+
+ return targetSec, targetSecIsTyp, nil
+}
+
+func getStorageTargetSection(rootCfg ConfigProvider, name, typ string, sec ConfigSection) (ConfigSection, targetSecType, error) {
+ // check typ first
+ if typ == "" {
+ if sec != nil { // check sec's type second
+ typ = sec.Key("STORAGE_TYPE").String()
+ if IsValidStorageType(StorageType(typ)) {
+ if targetSec, _ := rootCfg.GetSection(storageSectionName + "." + typ); targetSec == nil {
+ return sec, targetSecIsSec, nil
+ }
+ }
+ }
+ }
+
+ if typ != "" {
+ targetSec, tp, err := getStorageSectionByType(rootCfg, typ)
+ if targetSec != nil || err != nil {
+ return targetSec, tp, err
+ }
+ }
+
+ // check the storage name third
+ targetSec, _ := rootCfg.GetSection(storageSectionName + "." + name)
+ if targetSec != nil {
+ targetType := targetSec.Key("STORAGE_TYPE").String()
+ switch {
+ case targetType == "":
+ if targetSec.Key("PATH").String() == "" { // both storage type and path are empty, use default
+ return getDefaultStorageSection(rootCfg), targetSecIsDefault, nil
+ }
+
+ targetSec.Key("STORAGE_TYPE").SetValue("local")
+ default:
+ targetSec, tp, err := getStorageSectionByType(rootCfg, targetType)
+ if targetSec != nil || err != nil {
+ return targetSec, tp, err
+ }
+ }
+
+ return targetSec, targetSecIsStorageWithName, nil
+ }
+
+ return getDefaultStorageSection(rootCfg), targetSecIsDefault, nil
+}
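+
+// In other words, the target section is resolved in roughly this order: the caller's
+// own section when it names a plain storage type with no matching [storage.<type>]
+// section, then [storage.<type>], then [storage.<name>], and finally the [storage]
+// defaults.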
+
+// getStorageOverrideSection returns the section whose SERVE_DIRECT, PATH, MINIO_BASE_PATH and MINIO_BUCKET keys override the target section when possible
+func getStorageOverrideSection(rootConfig ConfigProvider, sec ConfigSection, targetSecType targetSecType, name string) ConfigSection {
+ if targetSecType == targetSecIsSec {
+ return nil
+ }
+
+ if sec != nil {
+ return sec
+ }
+
+ if targetSecType != targetSecIsStorageWithName {
+ nameSec, _ := rootConfig.GetSection(storageSectionName + "." + name)
+ return nameSec
+ }
+ return nil
+}
+
+func getStorageForLocal(targetSec, overrideSec ConfigSection, tp targetSecType, name string) (*Storage, error) {
+ storage := Storage{
+ Type: StorageType(targetSec.Key("STORAGE_TYPE").String()),
+ }
+
+ targetPath := ConfigSectionKeyString(targetSec, "PATH", "")
+ var fallbackPath string
+ if targetPath == "" { // no path
+ fallbackPath = filepath.Join(AppDataPath, name)
+ } else {
+ if tp == targetSecIsStorage || tp == targetSecIsDefault {
+ fallbackPath = filepath.Join(targetPath, name)
+ } else {
+ fallbackPath = targetPath
+ }
+ if !filepath.IsAbs(fallbackPath) {
+ fallbackPath = filepath.Join(AppDataPath, fallbackPath)
+ }
+ }
+
+ if overrideSec == nil { // no override section
+ storage.Path = fallbackPath
+ } else {
+ storage.Path = ConfigSectionKeyString(overrideSec, "PATH", "")
+ if storage.Path == "" { // overrideSec has no path
+ storage.Path = fallbackPath
+ } else if !filepath.IsAbs(storage.Path) {
+ if targetPath == "" {
+ storage.Path = filepath.Join(AppDataPath, storage.Path)
+ } else {
+ storage.Path = filepath.Join(targetPath, storage.Path)
+ }
+ }
+ }
+
+ return &storage, nil
+}
+
+func getStorageForMinio(targetSec, overrideSec ConfigSection, tp targetSecType, name string) (*Storage, error) {
+ var storage Storage
+ storage.Type = StorageType(targetSec.Key("STORAGE_TYPE").String())
+ if err := targetSec.MapTo(&storage.MinioConfig); err != nil {
+ return nil, fmt.Errorf("map minio config failed: %v", err)
+ }
+
+ var defaultPath string
+ if storage.MinioConfig.BasePath != "" {
+ if tp == targetSecIsStorage || tp == targetSecIsDefault {
+ defaultPath = strings.TrimSuffix(storage.MinioConfig.BasePath, "/") + "/" + name + "/"
+ } else {
+ defaultPath = storage.MinioConfig.BasePath
+ }
+ }
+ if defaultPath == "" {
+ defaultPath = name + "/"
+ }
+
+ if overrideSec != nil {
+ storage.MinioConfig.ServeDirect = ConfigSectionKeyBool(overrideSec, "SERVE_DIRECT", storage.MinioConfig.ServeDirect)
+ storage.MinioConfig.BasePath = ConfigSectionKeyString(overrideSec, "MINIO_BASE_PATH", defaultPath)
+ storage.MinioConfig.Bucket = ConfigSectionKeyString(overrideSec, "MINIO_BUCKET", storage.MinioConfig.Bucket)
+ } else {
+ storage.MinioConfig.BasePath = defaultPath
+ }
+ return &storage, nil
+}
diff --git a/modules/setting/storage_test.go b/modules/setting/storage_test.go
new file mode 100644
index 0000000..2716079
--- /dev/null
+++ b/modules/setting/storage_test.go
@@ -0,0 +1,468 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "path/filepath"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func Test_getStorageMultipleName(t *testing.T) {
+ iniStr := `
+[lfs]
+MINIO_BUCKET = gitea-lfs
+
+[attachment]
+MINIO_BUCKET = gitea-attachment
+
+[storage]
+STORAGE_TYPE = minio
+MINIO_BUCKET = gitea-storage
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+ assert.EqualValues(t, "gitea-attachment", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadLFSFrom(cfg))
+ assert.EqualValues(t, "gitea-lfs", LFS.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadAvatarsFrom(cfg))
+ assert.EqualValues(t, "gitea-storage", Avatar.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "avatars/", Avatar.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageUseOtherNameAsType(t *testing.T) {
+ iniStr := `
+[attachment]
+STORAGE_TYPE = lfs
+
+[storage.lfs]
+STORAGE_TYPE = minio
+MINIO_BUCKET = gitea-storage
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadAttachmentFrom(cfg))
+ assert.EqualValues(t, "gitea-storage", Attachment.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "attachments/", Attachment.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadLFSFrom(cfg))
+ assert.EqualValues(t, "gitea-storage", LFS.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "lfs/", LFS.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageInheritStorageType(t *testing.T) {
+ iniStr := `
+[storage]
+STORAGE_TYPE = minio
+`
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+
+ require.NoError(t, loadPackagesFrom(cfg))
+ assert.EqualValues(t, "minio", Packages.Storage.Type)
+ assert.EqualValues(t, "gitea", Packages.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "packages/", Packages.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ assert.EqualValues(t, "minio", RepoArchive.Storage.Type)
+ assert.EqualValues(t, "gitea", RepoArchive.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadActionsFrom(cfg))
+ assert.EqualValues(t, "minio", Actions.LogStorage.Type)
+ assert.EqualValues(t, "gitea", Actions.LogStorage.MinioConfig.Bucket)
+ assert.EqualValues(t, "actions_log/", Actions.LogStorage.MinioConfig.BasePath)
+
+ assert.EqualValues(t, "minio", Actions.ArtifactStorage.Type)
+ assert.EqualValues(t, "gitea", Actions.ArtifactStorage.MinioConfig.Bucket)
+ assert.EqualValues(t, "actions_artifacts/", Actions.ArtifactStorage.MinioConfig.BasePath)
+
+ require.NoError(t, loadAvatarsFrom(cfg))
+ assert.EqualValues(t, "minio", Avatar.Storage.Type)
+ assert.EqualValues(t, "gitea", Avatar.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "avatars/", Avatar.Storage.MinioConfig.BasePath)
+
+ require.NoError(t, loadRepoAvatarFrom(cfg))
+ assert.EqualValues(t, "minio", RepoAvatar.Storage.Type)
+ assert.EqualValues(t, "gitea", RepoAvatar.Storage.MinioConfig.Bucket)
+ assert.EqualValues(t, "repo-avatars/", RepoAvatar.Storage.MinioConfig.BasePath)
+}
+
+type testLocalStoragePathCase struct {
+ loader func(rootCfg ConfigProvider) error
+ storagePtr **Storage
+ expectedPath string
+}
+
+func testLocalStoragePath(t *testing.T, appDataPath, iniStr string, cases []testLocalStoragePathCase) {
+ cfg, err := NewConfigProviderFromData(iniStr)
+ require.NoError(t, err)
+ AppDataPath = appDataPath
+ for _, c := range cases {
+ require.NoError(t, c.loader(cfg))
+ storage := *c.storagePtr
+
+ assert.EqualValues(t, "local", storage.Type)
+ assert.True(t, filepath.IsAbs(storage.Path))
+ assert.EqualValues(t, filepath.Clean(c.expectedPath), filepath.Clean(storage.Path))
+ }
+}
+
+func Test_getStorageInheritStorageTypeLocal(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/repo-archive"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPath(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/repo-archive"},
+ {loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalRelativePath(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = storages
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/storages/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/storages/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/storages/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/storages/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/storages/repo-archive"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/storages/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/storages/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/storages/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+PATH = /data/gitea/the-archives-dir
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/the-archives-dir"},
+ {loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverrideEmpty(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/repo-archive"},
+ {loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalRelativePathOverride(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage]
+STORAGE_TYPE = local
+PATH = /data/gitea
+
+[repo-archive]
+PATH = the-archives-dir
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/data/gitea/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/data/gitea/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/data/gitea/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/data/gitea/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/the-archives-dir"},
+ {loadActionsFrom, &Actions.LogStorage, "/data/gitea/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/data/gitea/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/data/gitea/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride3(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/archives"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride3_5(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = a-relative-path
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/a-relative-path"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride4(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+
+[repo-archive]
+PATH = /tmp/gitea/archives
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/tmp/gitea/archives"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride5(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+STORAGE_TYPE = local
+PATH = /data/gitea/archives
+
+[repo-archive]
+`, []testLocalStoragePathCase{
+ {loadAttachmentFrom, &Attachment.Storage, "/appdata/attachments"},
+ {loadLFSFrom, &LFS.Storage, "/appdata/lfs"},
+ {loadActionsFrom, &Actions.ArtifactStorage, "/appdata/actions_artifacts"},
+ {loadPackagesFrom, &Packages.Storage, "/appdata/packages"},
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/data/gitea/archives"},
+ {loadActionsFrom, &Actions.LogStorage, "/appdata/actions_log"},
+ {loadAvatarsFrom, &Avatar.Storage, "/appdata/avatars"},
+ {loadRepoAvatarFrom, &RepoAvatar.Storage, "/appdata/repo-avatars"},
+ })
+}
+
+func Test_getStorageInheritStorageTypeLocalPathOverride72(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[repo-archive]
+STORAGE_TYPE = local
+PATH = archives
+`, []testLocalStoragePathCase{
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/archives"},
+ })
+}
+
+func Test_getStorageConfiguration20(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_storage
+PATH = archives
+`)
+ require.NoError(t, err)
+
+ require.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration21(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+`, []testLocalStoragePathCase{
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/repo-archive"},
+ })
+}
+
+func Test_getStorageConfiguration22(t *testing.T) {
+ testLocalStoragePath(t, "/appdata", `
+[storage.repo-archive]
+PATH = archives
+`, []testLocalStoragePathCase{
+ {loadRepoArchiveFrom, &RepoArchive.Storage, "/appdata/archives"},
+ })
+}
+
+func Test_getStorageConfiguration23(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+`)
+ require.NoError(t, err)
+
+ _, err = getStorage(cfg, "", "", nil)
+ require.Error(t, err)
+
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ cp := RepoArchive.Storage.ToShadowCopy()
+ assert.EqualValues(t, "******", cp.MinioConfig.AccessKeyID)
+ assert.EqualValues(t, "******", cp.MinioConfig.SecretAccessKey)
+}
+
+func Test_getStorageConfiguration24(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_archive
+
+[storage.my_archive]
+; unsupported, storage type should be defined explicitly
+PATH = archives
+`)
+ require.NoError(t, err)
+ require.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration25(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = my_archive
+
+[storage.my_archive]
+; unsupported, storage type should be known type
+STORAGE_TYPE = unknown // should be local or minio
+PATH = archives
+`)
+ require.NoError(t, err)
+ require.Error(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration26(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+; wrong configuration
+MINIO_USE_SSL = abc
+`)
+ require.NoError(t, err)
+ // require.Error(t, loadRepoArchiveFrom(cfg))
+ // FIXME: this should return error but now ini package's MapTo() doesn't check type
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+}
+
+func Test_getStorageConfiguration27(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[storage.repo-archive]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+MINIO_USE_SSL = true
+`)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ assert.EqualValues(t, "my_access_key", RepoArchive.Storage.MinioConfig.AccessKeyID)
+ assert.EqualValues(t, "my_secret_key", RepoArchive.Storage.MinioConfig.SecretAccessKey)
+ assert.True(t, RepoArchive.Storage.MinioConfig.UseSSL)
+ assert.EqualValues(t, "repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+}
+
+func Test_getStorageConfiguration28(t *testing.T) {
+ cfg, err := NewConfigProviderFromData(`
+[storage]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+MINIO_USE_SSL = true
+MINIO_BASE_PATH = /prefix
+`)
+ require.NoError(t, err)
+ require.NoError(t, loadRepoArchiveFrom(cfg))
+ assert.EqualValues(t, "my_access_key", RepoArchive.Storage.MinioConfig.AccessKeyID)
+ assert.EqualValues(t, "my_secret_key", RepoArchive.Storage.MinioConfig.SecretAccessKey)
+ assert.True(t, RepoArchive.Storage.MinioConfig.UseSSL)
+ assert.EqualValues(t, "/prefix/repo-archive/", RepoArchive.Storage.MinioConfig.BasePath)
+
+ cfg, err = NewConfigProviderFromData(`
+[storage]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+MINIO_USE_SSL = true
+MINIO_BASE_PATH = /prefix
+
+[lfs]
+MINIO_BASE_PATH = /lfs
+`)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+ assert.EqualValues(t, "my_access_key", LFS.Storage.MinioConfig.AccessKeyID)
+ assert.EqualValues(t, "my_secret_key", LFS.Storage.MinioConfig.SecretAccessKey)
+ assert.True(t, LFS.Storage.MinioConfig.UseSSL)
+ assert.EqualValues(t, "/lfs", LFS.Storage.MinioConfig.BasePath)
+
+ cfg, err = NewConfigProviderFromData(`
+[storage]
+STORAGE_TYPE = minio
+MINIO_ACCESS_KEY_ID = my_access_key
+MINIO_SECRET_ACCESS_KEY = my_secret_key
+MINIO_USE_SSL = true
+MINIO_BASE_PATH = /prefix
+
+[storage.lfs]
+MINIO_BASE_PATH = /lfs
+`)
+ require.NoError(t, err)
+ require.NoError(t, loadLFSFrom(cfg))
+ assert.EqualValues(t, "my_access_key", LFS.Storage.MinioConfig.AccessKeyID)
+ assert.EqualValues(t, "my_secret_key", LFS.Storage.MinioConfig.SecretAccessKey)
+ assert.True(t, LFS.Storage.MinioConfig.UseSSL)
+ assert.EqualValues(t, "/lfs", LFS.Storage.MinioConfig.BasePath)
+}
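A minimal sketch (not part of the patch), assuming the same package and the NewConfigProviderFromData / loadRepoArchiveFrom helpers exercised above, of the precedence rule these cases check: a PATH in [repo-archive] wins over [storage.repo-archive], which wins over the generic [storage] defaults, and relative paths are resolved under the application data path. The function name is illustrative only.

// sketchRepoArchivePrecedence mirrors Test_getStorageInheritStorageTypeLocalPathOverride4.
func sketchRepoArchivePrecedence() (string, error) {
	cfg, err := NewConfigProviderFromData(`
[storage.repo-archive]
STORAGE_TYPE = local
PATH = /data/gitea/archives

[repo-archive]
PATH = /tmp/gitea/archives
`)
	if err != nil {
		return "", err
	}
	if err := loadRepoArchiveFrom(cfg); err != nil {
		return "", err
	}
	// The section-specific [repo-archive] PATH takes precedence.
	return RepoArchive.Storage.Path, nil // expected: /tmp/gitea/archives
}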
diff --git a/modules/setting/task.go b/modules/setting/task.go
new file mode 100644
index 0000000..f75b4f1
--- /dev/null
+++ b/modules/setting/task.go
@@ -0,0 +1,26 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+// DEPRECATED settings: these must not be removed yet, because users may upgrade from an
+// older version to the latest one, and removing them would silence the deprecation warning.
+// (If they are removed, a default of 1000 will also need to be set for [queue.task] LENGTH.)
+func loadTaskFrom(rootCfg ConfigProvider) {
+ taskSec := rootCfg.Section("task")
+ queueTaskSec := rootCfg.Section("queue.task")
+
+ deprecatedSetting(rootCfg, "task", "QUEUE_TYPE", "queue.task", "TYPE", "v1.19.0")
+ deprecatedSetting(rootCfg, "task", "QUEUE_CONN_STR", "queue.task", "CONN_STR", "v1.19.0")
+ deprecatedSetting(rootCfg, "task", "QUEUE_LENGTH", "queue.task", "LENGTH", "v1.19.0")
+
+ switch taskSec.Key("QUEUE_TYPE").MustString("channel") {
+ case "channel":
+ queueTaskSec.Key("TYPE").MustString("persistable-channel")
+ queueTaskSec.Key("CONN_STR").MustString(taskSec.Key("QUEUE_CONN_STR").MustString(""))
+ case "redis":
+ queueTaskSec.Key("TYPE").MustString("redis")
+ queueTaskSec.Key("CONN_STR").MustString(taskSec.Key("QUEUE_CONN_STR").MustString("addrs=127.0.0.1:6379 db=0"))
+ }
+ queueTaskSec.Key("LENGTH").MustInt(taskSec.Key("QUEUE_LENGTH").MustInt(1000))
+}
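A hedged sketch (not part of the patch, assuming the same package) of how the deprecated [task] keys are forwarded into [queue.task]: the Must* accessors write the fallback value into the key when it is empty, so later readers of [queue.task] see the migrated settings. The function name is illustrative only.

// sketchTaskFallback shows the deprecated [task] keys being mapped to [queue.task].
func sketchTaskFallback() (string, string, error) {
	cfg, err := NewConfigProviderFromData(`
[task]
QUEUE_TYPE = redis
QUEUE_CONN_STR = addrs=127.0.0.1:6379 db=0
`)
	if err != nil {
		return "", "", err
	}
	loadTaskFrom(cfg)
	q := cfg.Section("queue.task")
	// TYPE becomes "redis", CONN_STR inherits the deprecated connection string,
	// and LENGTH defaults to 1000.
	return q.Key("TYPE").String(), q.Key("CONN_STR").String(), nil
}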
diff --git a/modules/setting/time.go b/modules/setting/time.go
new file mode 100644
index 0000000..39acba1
--- /dev/null
+++ b/modules/setting/time.go
@@ -0,0 +1,28 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// DefaultUILocation is the time zone used when rendering times in the UI.
+var DefaultUILocation = time.Local
+
+func loadTimeFrom(rootCfg ConfigProvider) {
+ zone := rootCfg.Section("time").Key("DEFAULT_UI_LOCATION").String()
+ if zone != "" {
+ var err error
+ DefaultUILocation, err = time.LoadLocation(zone)
+ if err != nil {
+ log.Fatal("Load time zone failed: %v", err)
+ }
+ log.Info("Default UI Location is %v", zone)
+ }
+ if DefaultUILocation == nil {
+ DefaultUILocation = time.Local
+ }
+}
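A hedged sketch (not part of the patch, assuming the same package) of the DEFAULT_UI_LOCATION setting: with the value below, DefaultUILocation points at the named zone; when the key is absent or empty, it stays time.Local. The function name is illustrative only.

// sketchLoadTime loads a [time] section with an explicit UI time zone.
func sketchLoadTime() error {
	cfg, err := NewConfigProviderFromData(`
[time]
DEFAULT_UI_LOCATION = Europe/Berlin
`)
	if err != nil {
		return err
	}
	loadTimeFrom(cfg) // calls log.Fatal if the zone name cannot be loaded
	_ = DefaultUILocation // *time.Location for Europe/Berlin
	return nil
}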
diff --git a/modules/setting/ui.go b/modules/setting/ui.go
new file mode 100644
index 0000000..40f1812
--- /dev/null
+++ b/modules/setting/ui.go
@@ -0,0 +1,170 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "time"
+
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+)
+
+// UI settings
+var UI = struct {
+ ExplorePagingNum int
+ SitemapPagingNum int
+ IssuePagingNum int
+ RepoSearchPagingNum int
+ MembersPagingNum int
+ FeedMaxCommitNum int
+ FeedPagingNum int
+ PackagesPagingNum int
+ GraphMaxCommitNum int
+ CodeCommentLines int
+ ReactionMaxUserNum int
+ MaxDisplayFileSize int64
+ ShowUserEmail bool
+ DefaultShowFullName bool
+ DefaultTheme string
+ Themes []string
+ Reactions []string
+ ReactionsLookup container.Set[string] `ini:"-"`
+ CustomEmojis []string
+ CustomEmojisMap map[string]string `ini:"-"`
+ SearchRepoDescription bool
+ OnlyShowRelevantRepos bool
+ ExploreDefaultSort string `ini:"EXPLORE_PAGING_DEFAULT_SORT"`
+ PreferredTimestampTense string
+
+ AmbiguousUnicodeDetection bool
+ SkipEscapeContexts []string
+
+ Notification struct {
+ MinTimeout time.Duration
+ TimeoutStep time.Duration
+ MaxTimeout time.Duration
+ EventSourceUpdateTime time.Duration
+ } `ini:"ui.notification"`
+
+ SVG struct {
+ Enabled bool `ini:"ENABLE_RENDER"`
+ } `ini:"ui.svg"`
+
+ CSV struct {
+ MaxFileSize int64
+ MaxRows int
+ } `ini:"ui.csv"`
+
+ Admin struct {
+ UserPagingNum int
+ RepoPagingNum int
+ NoticePagingNum int
+ OrgPagingNum int
+ } `ini:"ui.admin"`
+ User struct {
+ RepoPagingNum int
+ } `ini:"ui.user"`
+ Meta struct {
+ Author string
+ Description string
+ Keywords string
+ } `ini:"ui.meta"`
+}{
+ ExplorePagingNum: 20,
+ SitemapPagingNum: 20,
+ IssuePagingNum: 20,
+ RepoSearchPagingNum: 20,
+ MembersPagingNum: 20,
+ FeedMaxCommitNum: 5,
+ FeedPagingNum: 20,
+ PackagesPagingNum: 20,
+ GraphMaxCommitNum: 100,
+ CodeCommentLines: 4,
+ ReactionMaxUserNum: 10,
+ MaxDisplayFileSize: 8388608,
+ DefaultTheme: `forgejo-auto`,
+ Themes: []string{`forgejo-auto`, `forgejo-light`, `forgejo-dark`, `gitea-auto`, `gitea-light`, `gitea-dark`, `forgejo-auto-deuteranopia-protanopia`, `forgejo-light-deuteranopia-protanopia`, `forgejo-dark-deuteranopia-protanopia`, `forgejo-auto-tritanopia`, `forgejo-light-tritanopia`, `forgejo-dark-tritanopia`},
+ Reactions: []string{`+1`, `-1`, `laugh`, `hooray`, `confused`, `heart`, `rocket`, `eyes`},
+ CustomEmojis: []string{`git`, `gitea`, `codeberg`, `gitlab`, `github`, `gogs`, `forgejo`},
+ CustomEmojisMap: map[string]string{"git": ":git:", "gitea": ":gitea:", "codeberg": ":codeberg:", "gitlab": ":gitlab:", "github": ":github:", "gogs": ":gogs:", "forgejo": ":forgejo:"},
+ ExploreDefaultSort: "recentupdate",
+ PreferredTimestampTense: "mixed",
+
+ AmbiguousUnicodeDetection: true,
+ SkipEscapeContexts: []string{},
+
+ Notification: struct {
+ MinTimeout time.Duration
+ TimeoutStep time.Duration
+ MaxTimeout time.Duration
+ EventSourceUpdateTime time.Duration
+ }{
+ MinTimeout: 10 * time.Second,
+ TimeoutStep: 10 * time.Second,
+ MaxTimeout: 60 * time.Second,
+ EventSourceUpdateTime: 10 * time.Second,
+ },
+ SVG: struct {
+ Enabled bool `ini:"ENABLE_RENDER"`
+ }{
+ Enabled: true,
+ },
+ CSV: struct {
+ MaxFileSize int64
+ MaxRows int
+ }{
+ MaxFileSize: 524288,
+ MaxRows: 2500,
+ },
+ Admin: struct {
+ UserPagingNum int
+ RepoPagingNum int
+ NoticePagingNum int
+ OrgPagingNum int
+ }{
+ UserPagingNum: 50,
+ RepoPagingNum: 50,
+ NoticePagingNum: 25,
+ OrgPagingNum: 50,
+ },
+ User: struct {
+ RepoPagingNum int
+ }{
+ RepoPagingNum: 15,
+ },
+ Meta: struct {
+ Author string
+ Description string
+ Keywords string
+ }{
+ Author: "Forgejo – Beyond coding. We forge.",
+ Description: "Forgejo is a self-hosted lightweight software forge. Easy to install and low maintenance, it just does the job.",
+ Keywords: "git,forge,forgejo",
+ },
+}
+
+func loadUIFrom(rootCfg ConfigProvider) {
+ mustMapSetting(rootCfg, "ui", &UI)
+ sec := rootCfg.Section("ui")
+ UI.ShowUserEmail = sec.Key("SHOW_USER_EMAIL").MustBool(true)
+ UI.DefaultShowFullName = sec.Key("DEFAULT_SHOW_FULL_NAME").MustBool(false)
+ UI.SearchRepoDescription = sec.Key("SEARCH_REPO_DESCRIPTION").MustBool(true)
+
+ if UI.PreferredTimestampTense != "mixed" && UI.PreferredTimestampTense != "absolute" {
+ log.Fatal("ui.PREFERRED_TIMESTAMP_TENSE must be either 'mixed' or 'absolute'")
+ }
+
+ // OnlyShowRelevantRepos=false is important for many private/enterprise instances,
+ // because many private repositories have no description or topic; users just want to search by repository name.
+ UI.OnlyShowRelevantRepos = sec.Key("ONLY_SHOW_RELEVANT_REPOS").MustBool(false)
+
+ UI.ReactionsLookup = make(container.Set[string])
+ for _, reaction := range UI.Reactions {
+ UI.ReactionsLookup.Add(reaction)
+ }
+ UI.CustomEmojisMap = make(map[string]string)
+ for _, emoji := range UI.CustomEmojis {
+ UI.CustomEmojisMap[emoji] = ":" + emoji + ":"
+ }
+}
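A hedged sketch (not part of the patch, assuming the same package) of overriding [ui] keys: mustMapSetting fills the UI struct from the section, then loadUIFrom derives the lookup structures from the mapped slices. The function name is illustrative only.

// sketchLoadUI overrides a few [ui] keys and loads them.
func sketchLoadUI() {
	cfg, err := NewConfigProviderFromData(`
[ui]
DEFAULT_THEME = forgejo-dark
REACTIONS = +1,-1,heart
SHOW_USER_EMAIL = false
`)
	if err != nil {
		return
	}
	loadUIFrom(cfg)
	// UI.DefaultTheme == "forgejo-dark", UI.ShowUserEmail == false, and
	// UI.ReactionsLookup.Contains("heart") == true (derived from UI.Reactions).
}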
diff --git a/modules/setting/webhook.go b/modules/setting/webhook.go
new file mode 100644
index 0000000..7b1ab4d
--- /dev/null
+++ b/modules/setting/webhook.go
@@ -0,0 +1,48 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package setting
+
+import (
+ "net/url"
+
+ "code.gitea.io/gitea/modules/log"
+)
+
+// Webhook settings
+var Webhook = struct {
+ QueueLength int
+ DeliverTimeout int
+ SkipTLSVerify bool
+ AllowedHostList string
+ PagingNum int
+ ProxyURL string
+ ProxyURLFixed *url.URL
+ ProxyHosts []string
+}{
+ QueueLength: 1000,
+ DeliverTimeout: 5,
+ SkipTLSVerify: false,
+ PagingNum: 10,
+ ProxyURL: "",
+ ProxyHosts: []string{},
+}
+
+func loadWebhookFrom(rootCfg ConfigProvider) {
+ sec := rootCfg.Section("webhook")
+ Webhook.QueueLength = sec.Key("QUEUE_LENGTH").MustInt(1000)
+ Webhook.DeliverTimeout = sec.Key("DELIVER_TIMEOUT").MustInt(5)
+ Webhook.SkipTLSVerify = sec.Key("SKIP_TLS_VERIFY").MustBool()
+ Webhook.AllowedHostList = sec.Key("ALLOWED_HOST_LIST").MustString("")
+ Webhook.PagingNum = sec.Key("PAGING_NUM").MustInt(10)
+ Webhook.ProxyURL = sec.Key("PROXY_URL").MustString("")
+ if Webhook.ProxyURL != "" {
+ var err error
+ Webhook.ProxyURLFixed, err = url.Parse(Webhook.ProxyURL)
+ if err != nil {
+ log.Error("Webhook PROXY_URL is not valid")
+ Webhook.ProxyURL = ""
+ }
+ }
+ Webhook.ProxyHosts = sec.Key("PROXY_HOSTS").Strings(",")
+}