author     Daniel Baumann <daniel@debian.org>    2024-10-18 20:33:49 +0200
committer  Daniel Baumann <daniel@debian.org>    2024-12-12 23:57:56 +0100
commit     e68b9d00a6e05b3a941f63ffb696f91e554ac5ec (patch)
tree       97775d6c13b0f416af55314eb6a89ef792474615 /cmd
parent     Initial commit. (diff)
download   forgejo-e68b9d00a6e05b3a941f63ffb696f91e554ac5ec.tar.xz
           forgejo-e68b9d00a6e05b3a941f63ffb696f91e554ac5ec.zip

Adding upstream version 9.0.3.

Signed-off-by: Daniel Baumann <daniel@debian.org>
Diffstat (limited to 'cmd')
-rw-r--r--  cmd/actions.go                              55
-rw-r--r--  cmd/admin.go                               168
-rw-r--r--  cmd/admin_auth.go                          111
-rw-r--r--  cmd/admin_auth_ldap.go                     409
-rw-r--r--  cmd/admin_auth_ldap_test.go               1326
-rw-r--r--  cmd/admin_auth_oauth.go                    299
-rw-r--r--  cmd/admin_auth_stmp.go                     200
-rw-r--r--  cmd/admin_regenerate.go                     46
-rw-r--r--  cmd/admin_user.go                           21
-rw-r--r--  cmd/admin_user_change_password.go           80
-rw-r--r--  cmd/admin_user_create.go                   175
-rw-r--r--  cmd/admin_user_delete.go                    81
-rw-r--r--  cmd/admin_user_generate_access_token.go     94
-rw-r--r--  cmd/admin_user_list.go                      60
-rw-r--r--  cmd/admin_user_must_change_password.go      60
-rw-r--r--  cmd/cert.go                                196
-rw-r--r--  cmd/cmd.go                                 135
-rw-r--r--  cmd/docs.go                                 65
-rw-r--r--  cmd/doctor.go                              219
-rw-r--r--  cmd/doctor_convert.go                       49
-rw-r--r--  cmd/doctor_test.go                          33
-rw-r--r--  cmd/dump.go                                492
-rw-r--r--  cmd/dump_repo.go                           192
-rw-r--r--  cmd/dump_test.go                           118
-rw-r--r--  cmd/embedded.go                            310
-rw-r--r--  cmd/forgejo/actions.go                     242
-rw-r--r--  cmd/forgejo/actions_test.go                 88
-rw-r--r--  cmd/forgejo/f3.go                           77
-rw-r--r--  cmd/forgejo/forgejo.go                     170
-rw-r--r--  cmd/generate.go                            100
-rw-r--r--  cmd/hook.go                                795
-rw-r--r--  cmd/hook_test.go                           196
-rw-r--r--  cmd/keys.go                                 83
-rw-r--r--  cmd/mailer.go                               50
-rw-r--r--  cmd/main.go                                225
-rw-r--r--  cmd/main_test.go                           179
-rw-r--r--  cmd/manager.go                             154
-rw-r--r--  cmd/manager_logging.go                     347
-rw-r--r--  cmd/migrate.go                              45
-rw-r--r--  cmd/migrate_storage.go                     267
-rw-r--r--  cmd/migrate_storage_test.go                134
-rw-r--r--  cmd/restore_repo.go                         69
-rw-r--r--  cmd/serv.go                                358
-rw-r--r--  cmd/web.go                                 357
-rw-r--r--  cmd/web_acme.go                            135
-rw-r--r--  cmd/web_graceful.go                         55
-rw-r--r--  cmd/web_https.go                           191
47 files changed, 9311 insertions, 0 deletions
diff --git a/cmd/actions.go b/cmd/actions.go
new file mode 100644
index 0000000..10ae624
--- /dev/null
+++ b/cmd/actions.go
@@ -0,0 +1,55 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ // CmdActions represents the available actions sub-commands.
+ CmdActions = &cli.Command{
+ Name: "actions",
+ Usage: "Manage Forgejo Actions",
+ Subcommands: []*cli.Command{
+ subcmdActionsGenRunnerToken,
+ },
+ }
+
+ subcmdActionsGenRunnerToken = &cli.Command{
+ Name: "generate-runner-token",
+ Usage: "Generate a new token for a runner to use to register with the server",
+ Action: runGenerateActionsRunnerToken,
+ Aliases: []string{"grt"},
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "scope",
+ Aliases: []string{"s"},
+ Value: "",
+ Usage: "{owner}[/{repo}] - leave empty for a global runner",
+ },
+ },
+ }
+)
+
+func runGenerateActionsRunnerToken(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setting.MustInstalled()
+
+ scope := c.String("scope")
+
+ respText, extra := private.GenerateActionsRunnerToken(ctx, scope)
+ if extra.HasError() {
+ return handleCliResponseExtra(extra)
+ }
+ _, _ = fmt.Printf("%s\n", respText.Text)
+ return nil
+}
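
The file above only declares the command tree; it still has to be registered on a root urfave/cli/v2 app before "actions generate-runner-token" can be dispatched (the root wiring lives in cmd/main.go, see the diffstat). The following is a minimal, self-contained sketch of that kind of wiring, not Forgejo's actual entry point; the binary name "demo" and the Action body are invented stand-ins:

// Minimal sketch of how a urfave/cli/v2 command tree like CmdActions is
// mounted and dispatched. The app name and the Action body are illustrative;
// the real command calls private.GenerateActionsRunnerToken instead.
package main

import (
	"fmt"
	"os"

	"github.com/urfave/cli/v2"
)

func main() {
	app := &cli.App{
		Name: "demo", // hypothetical binary name
		Commands: []*cli.Command{
			{
				Name:  "actions",
				Usage: "Manage actions",
				Subcommands: []*cli.Command{
					{
						Name:    "generate-runner-token",
						Aliases: []string{"grt"},
						Flags: []cli.Flag{
							&cli.StringFlag{Name: "scope", Aliases: []string{"s"}},
						},
						Action: func(c *cli.Context) error {
							fmt.Println("scope:", c.String("scope")) // stand-in for the real token call
							return nil
						},
					},
				},
			},
		},
	}
	if err := app.Run(os.Args); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}

With that wiring, the command is invoked as "demo actions generate-runner-token --scope owner/repo", mirroring the scope flag declared above.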
diff --git a/cmd/admin.go b/cmd/admin.go
new file mode 100644
index 0000000..6c9480e
--- /dev/null
+++ b/cmd/admin.go
@@ -0,0 +1,168 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ repo_model "code.gitea.io/gitea/models/repo"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/gitrepo"
+ "code.gitea.io/gitea/modules/log"
+ repo_module "code.gitea.io/gitea/modules/repository"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ // CmdAdmin represents the available admin sub-command.
+ CmdAdmin = &cli.Command{
+ Name: "admin",
+ Usage: "Perform common administrative operations",
+ Subcommands: []*cli.Command{
+ subcmdUser,
+ subcmdRepoSyncReleases,
+ subcmdRegenerate,
+ subcmdAuth,
+ subcmdSendMail,
+ },
+ }
+
+ subcmdRepoSyncReleases = &cli.Command{
+ Name: "repo-sync-releases",
+ Usage: "Synchronize repository releases with tags",
+ Action: runRepoSyncReleases,
+ }
+
+ subcmdRegenerate = &cli.Command{
+ Name: "regenerate",
+ Usage: "Regenerate specific files",
+ Subcommands: []*cli.Command{
+ microcmdRegenHooks,
+ microcmdRegenKeys,
+ },
+ }
+
+ subcmdAuth = &cli.Command{
+ Name: "auth",
+ Usage: "Modify external auth providers",
+ Subcommands: []*cli.Command{
+ microcmdAuthAddOauth,
+ microcmdAuthUpdateOauth,
+ microcmdAuthAddLdapBindDn,
+ microcmdAuthUpdateLdapBindDn,
+ microcmdAuthAddLdapSimpleAuth,
+ microcmdAuthUpdateLdapSimpleAuth,
+ microcmdAuthAddSMTP,
+ microcmdAuthUpdateSMTP,
+ microcmdAuthList,
+ microcmdAuthDelete,
+ },
+ }
+
+ subcmdSendMail = &cli.Command{
+ Name: "sendmail",
+ Usage: "Send a message to all users",
+ Action: runSendMail,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "title",
+ Usage: `The title of the message`,
+ Value: "",
+ },
+ &cli.StringFlag{
+ Name: "content",
+ Usage: "a content of a message",
+ Value: "",
+ },
+ &cli.BoolFlag{
+ Name: "force",
+ Aliases: []string{"f"},
+ Usage: "A flag to bypass a confirmation step",
+ },
+ },
+ }
+
+ idFlag = &cli.Int64Flag{
+ Name: "id",
+ Usage: "ID of authentication source",
+ }
+)
+
+func runRepoSyncReleases(_ *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ if err := git.InitSimple(ctx); err != nil {
+ return err
+ }
+
+ log.Trace("Synchronizing repository releases (this may take a while)")
+ for page := 1; ; page++ {
+ repos, count, err := repo_model.SearchRepositoryByName(ctx, &repo_model.SearchRepoOptions{
+ ListOptions: db.ListOptions{
+ PageSize: repo_model.RepositoryListDefaultPageSize,
+ Page: page,
+ },
+ Private: true,
+ })
+ if err != nil {
+ return fmt.Errorf("SearchRepositoryByName: %w", err)
+ }
+ if len(repos) == 0 {
+ break
+ }
+ log.Trace("Processing next %d repos of %d", len(repos), count)
+ for _, repo := range repos {
+ log.Trace("Synchronizing repo %s with path %s", repo.FullName(), repo.RepoPath())
+ gitRepo, err := gitrepo.OpenRepository(ctx, repo)
+ if err != nil {
+ log.Warn("OpenRepository: %v", err)
+ continue
+ }
+
+ oldnum, err := getReleaseCount(ctx, repo.ID)
+ if err != nil {
+ log.Warn(" GetReleaseCountByRepoID: %v", err)
+ }
+ log.Trace(" currentNumReleases is %d, running SyncReleasesWithTags", oldnum)
+
+ if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
+ log.Warn(" SyncReleasesWithTags: %v", err)
+ gitRepo.Close()
+ continue
+ }
+
+ count, err = getReleaseCount(ctx, repo.ID)
+ if err != nil {
+ log.Warn(" GetReleaseCountByRepoID: %v", err)
+ gitRepo.Close()
+ continue
+ }
+
+ log.Trace(" repo %s releases synchronized to tags: from %d to %d",
+ repo.FullName(), oldnum, count)
+ gitRepo.Close()
+ }
+ }
+
+ return nil
+}
+
+func getReleaseCount(ctx context.Context, id int64) (int64, error) {
+ return db.Count[repo_model.Release](
+ ctx,
+ repo_model.FindReleasesOptions{
+ RepoID: id,
+ IncludeTags: true,
+ },
+ )
+}
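
runRepoSyncReleases above walks the repository table one page at a time: the outer for loop keeps incrementing page until SearchRepositoryByName returns an empty slice, so memory stays bounded even on large instances. A stripped-down sketch of that paging pattern, with a hypothetical fetchPage standing in for the real database query:

// Generic sketch of the paging loop used by runRepoSyncReleases above:
// fetch one page at a time and stop on the first empty page.
// fetchPage is an invented stand-in for repo_model.SearchRepositoryByName.
package main

import "fmt"

func fetchPage(page, pageSize int) []string {
	// Stand-in for a database query; pretend there are only two pages of data.
	if page > 2 {
		return nil
	}
	items := make([]string, 0, pageSize)
	for i := 0; i < pageSize; i++ {
		items = append(items, fmt.Sprintf("repo-%d-%d", page, i))
	}
	return items
}

func main() {
	const pageSize = 3
	for page := 1; ; page++ {
		repos := fetchPage(page, pageSize)
		if len(repos) == 0 {
			break // no more results: stop paging
		}
		for _, repo := range repos {
			fmt.Println("processing", repo)
		}
	}
}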
diff --git a/cmd/admin_auth.go b/cmd/admin_auth.go
new file mode 100644
index 0000000..4777a92
--- /dev/null
+++ b/cmd/admin_auth.go
@@ -0,0 +1,111 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "text/tabwriter"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ auth_service "code.gitea.io/gitea/services/auth"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ microcmdAuthDelete = &cli.Command{
+ Name: "delete",
+ Usage: "Delete specific auth source",
+ Flags: []cli.Flag{idFlag},
+ Action: runDeleteAuth,
+ }
+ microcmdAuthList = &cli.Command{
+ Name: "list",
+ Usage: "List auth sources",
+ Action: runListAuth,
+ Flags: []cli.Flag{
+ &cli.IntFlag{
+ Name: "min-width",
+ Usage: "Minimal cell width including any padding for the formatted table",
+ Value: 0,
+ },
+ &cli.IntFlag{
+ Name: "tab-width",
+ Usage: "width of tab characters in formatted table (equivalent number of spaces)",
+ Value: 8,
+ },
+ &cli.IntFlag{
+ Name: "padding",
+ Usage: "padding added to a cell before computing its width",
+ Value: 1,
+ },
+ &cli.StringFlag{
+ Name: "pad-char",
+ Usage: `ASCII char used for padding. If padchar == '\\t', the Writer will assume that the width of a '\\t' in the formatted output is tabwidth, and cells are left-aligned independent of align_left (for correct-looking results, tabwidth must correspond to the tab width in the viewer displaying the result)`,
+ Value: "\t",
+ },
+ &cli.BoolFlag{
+ Name: "vertical-bars",
+ Usage: "Set to true to print vertical bars between columns",
+ },
+ },
+ }
+)
+
+func runListAuth(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ authSources, err := db.Find[auth_model.Source](ctx, auth_model.FindSourcesOptions{})
+ if err != nil {
+ return err
+ }
+
+ flags := tabwriter.AlignRight
+ if c.Bool("vertical-bars") {
+ flags |= tabwriter.Debug
+ }
+
+ padChar := byte('\t')
+ if len(c.String("pad-char")) > 0 {
+ padChar = c.String("pad-char")[0]
+ }
+
+ // loop through each source and print
+ w := tabwriter.NewWriter(os.Stdout, c.Int("min-width"), c.Int("tab-width"), c.Int("padding"), padChar, flags)
+ fmt.Fprintf(w, "ID\tName\tType\tEnabled\n")
+ for _, source := range authSources {
+ fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", source.ID, source.Name, source.Type.String(), source.IsActive)
+ }
+ w.Flush()
+
+ return nil
+}
+
+func runDeleteAuth(c *cli.Context) error {
+ if !c.IsSet("id") {
+ return errors.New("--id flag is missing")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
+ if err != nil {
+ return err
+ }
+
+ return auth_service.DeleteSource(ctx, source)
+}
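
The list subcommand relies entirely on the standard library's text/tabwriter: tabwriter.AlignRight right-aligns the cells, and tabwriter.Debug draws the vertical bars behind --vertical-bars. A self-contained sketch using the same defaults as the flags above (min-width 0, tab-width 8, padding 1, pad char '\t'); the two sample rows are invented:

// Standalone sketch of the text/tabwriter usage in runListAuth above.
package main

import (
	"fmt"
	"os"
	"text/tabwriter"
)

func main() {
	flags := tabwriter.AlignRight
	// flags |= tabwriter.Debug // uncomment to get the --vertical-bars output

	// Same defaults as the CLI flags: min-width 0, tab-width 8, padding 1, pad char '\t'.
	w := tabwriter.NewWriter(os.Stdout, 0, 8, 1, '\t', flags)
	fmt.Fprintf(w, "ID\tName\tType\tEnabled\n")
	fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", 1, "local LDAP", "LDAP (via BindDN)", true) // invented rows
	fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", 2, "company SSO", "OAuth2", false)
	w.Flush()
}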
diff --git a/cmd/admin_auth_ldap.go b/cmd/admin_auth_ldap.go
new file mode 100644
index 0000000..e3c8180
--- /dev/null
+++ b/cmd/admin_auth_ldap.go
@@ -0,0 +1,409 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth/source/ldap"
+
+ "github.com/urfave/cli/v2"
+)
+
+type (
+ authService struct {
+ initDB func(ctx context.Context) error
+ createAuthSource func(context.Context, *auth.Source) error
+ updateAuthSource func(context.Context, *auth.Source) error
+ getAuthSourceByID func(ctx context.Context, id int64) (*auth.Source, error)
+ }
+)
+
+var (
+ commonLdapCLIFlags = []cli.Flag{
+ &cli.StringFlag{
+ Name: "name",
+ Usage: "Authentication name.",
+ },
+ &cli.BoolFlag{
+ Name: "not-active",
+ Usage: "Deactivate the authentication source.",
+ },
+ &cli.BoolFlag{
+ Name: "active",
+ Usage: "Activate the authentication source.",
+ },
+ &cli.StringFlag{
+ Name: "security-protocol",
+ Usage: "Security protocol name.",
+ },
+ &cli.BoolFlag{
+ Name: "skip-tls-verify",
+ Usage: "Disable TLS verification.",
+ },
+ &cli.StringFlag{
+ Name: "host",
+ Usage: "The address where the LDAP server can be reached.",
+ },
+ &cli.IntFlag{
+ Name: "port",
+ Usage: "The port to use when connecting to the LDAP server.",
+ },
+ &cli.StringFlag{
+ Name: "user-search-base",
+ Usage: "The LDAP base at which user accounts will be searched for.",
+ },
+ &cli.StringFlag{
+ Name: "user-filter",
+ Usage: "An LDAP filter declaring how to find the user record that is attempting to authenticate.",
+ },
+ &cli.StringFlag{
+ Name: "admin-filter",
+ Usage: "An LDAP filter specifying if a user should be given administrator privileges.",
+ },
+ &cli.StringFlag{
+ Name: "restricted-filter",
+ Usage: "An LDAP filter specifying if a user should be given restricted status.",
+ },
+ &cli.BoolFlag{
+ Name: "allow-deactivate-all",
+ Usage: "Allow empty search results to deactivate all users.",
+ },
+ &cli.StringFlag{
+ Name: "username-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user name.",
+ },
+ &cli.StringFlag{
+ Name: "firstname-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user’s first name.",
+ },
+ &cli.StringFlag{
+ Name: "surname-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user’s surname.",
+ },
+ &cli.StringFlag{
+ Name: "email-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user’s email address.",
+ },
+ &cli.StringFlag{
+ Name: "public-ssh-key-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user’s public ssh key.",
+ },
+ &cli.BoolFlag{
+ Name: "skip-local-2fa",
+ Usage: "Set to true to skip local 2fa for users authenticated by this source",
+ },
+ &cli.StringFlag{
+ Name: "avatar-attribute",
+ Usage: "The attribute of the user’s LDAP record containing the user’s avatar.",
+ },
+ }
+
+ ldapBindDnCLIFlags = append(commonLdapCLIFlags,
+ &cli.StringFlag{
+ Name: "bind-dn",
+ Usage: "The DN to bind to the LDAP server with when searching for the user.",
+ },
+ &cli.StringFlag{
+ Name: "bind-password",
+ Usage: "The password for the Bind DN, if any.",
+ },
+ &cli.BoolFlag{
+ Name: "attributes-in-bind",
+ Usage: "Fetch attributes in bind DN context.",
+ },
+ &cli.BoolFlag{
+ Name: "synchronize-users",
+ Usage: "Enable user synchronization.",
+ },
+ &cli.BoolFlag{
+ Name: "disable-synchronize-users",
+ Usage: "Disable user synchronization.",
+ },
+ &cli.UintFlag{
+ Name: "page-size",
+ Usage: "Search page size.",
+ })
+
+ ldapSimpleAuthCLIFlags = append(commonLdapCLIFlags,
+ &cli.StringFlag{
+ Name: "user-dn",
+ Usage: "The user's DN.",
+ })
+
+ microcmdAuthAddLdapBindDn = &cli.Command{
+ Name: "add-ldap",
+ Usage: "Add new LDAP (via Bind DN) authentication source",
+ Action: func(c *cli.Context) error {
+ return newAuthService().addLdapBindDn(c)
+ },
+ Flags: ldapBindDnCLIFlags,
+ }
+
+ microcmdAuthUpdateLdapBindDn = &cli.Command{
+ Name: "update-ldap",
+ Usage: "Update existing LDAP (via Bind DN) authentication source",
+ Action: func(c *cli.Context) error {
+ return newAuthService().updateLdapBindDn(c)
+ },
+ Flags: append([]cli.Flag{idFlag}, ldapBindDnCLIFlags...),
+ }
+
+ microcmdAuthAddLdapSimpleAuth = &cli.Command{
+ Name: "add-ldap-simple",
+ Usage: "Add new LDAP (simple auth) authentication source",
+ Action: func(c *cli.Context) error {
+ return newAuthService().addLdapSimpleAuth(c)
+ },
+ Flags: ldapSimpleAuthCLIFlags,
+ }
+
+ microcmdAuthUpdateLdapSimpleAuth = &cli.Command{
+ Name: "update-ldap-simple",
+ Usage: "Update existing LDAP (simple auth) authentication source",
+ Action: func(c *cli.Context) error {
+ return newAuthService().updateLdapSimpleAuth(c)
+ },
+ Flags: append([]cli.Flag{idFlag}, ldapSimpleAuthCLIFlags...),
+ }
+)
+
+// newAuthService creates a service with default functions.
+func newAuthService() *authService {
+ return &authService{
+ initDB: initDB,
+ createAuthSource: auth.CreateSource,
+ updateAuthSource: auth.UpdateSource,
+ getAuthSourceByID: auth.GetSourceByID,
+ }
+}
+
+// parseAuthSource assigns values on authSource according to command line flags.
+func parseAuthSource(c *cli.Context, authSource *auth.Source) {
+ if c.IsSet("name") {
+ authSource.Name = c.String("name")
+ }
+ if c.IsSet("not-active") {
+ authSource.IsActive = !c.Bool("not-active")
+ }
+ if c.IsSet("active") {
+ authSource.IsActive = c.Bool("active")
+ }
+ if c.IsSet("synchronize-users") {
+ authSource.IsSyncEnabled = c.Bool("synchronize-users")
+ }
+ if c.IsSet("disable-synchronize-users") {
+ authSource.IsSyncEnabled = !c.Bool("disable-synchronize-users")
+ }
+}
+
+// parseLdapConfig assigns values on config according to command line flags.
+func parseLdapConfig(c *cli.Context, config *ldap.Source) error {
+ if c.IsSet("name") {
+ config.Name = c.String("name")
+ }
+ if c.IsSet("host") {
+ config.Host = c.String("host")
+ }
+ if c.IsSet("port") {
+ config.Port = c.Int("port")
+ }
+ if c.IsSet("security-protocol") {
+ p, ok := findLdapSecurityProtocolByName(c.String("security-protocol"))
+ if !ok {
+ return fmt.Errorf("Unknown security protocol name: %s", c.String("security-protocol"))
+ }
+ config.SecurityProtocol = p
+ }
+ if c.IsSet("skip-tls-verify") {
+ config.SkipVerify = c.Bool("skip-tls-verify")
+ }
+ if c.IsSet("bind-dn") {
+ config.BindDN = c.String("bind-dn")
+ }
+ if c.IsSet("user-dn") {
+ config.UserDN = c.String("user-dn")
+ }
+ if c.IsSet("bind-password") {
+ config.BindPassword = c.String("bind-password")
+ }
+ if c.IsSet("user-search-base") {
+ config.UserBase = c.String("user-search-base")
+ }
+ if c.IsSet("username-attribute") {
+ config.AttributeUsername = c.String("username-attribute")
+ }
+ if c.IsSet("firstname-attribute") {
+ config.AttributeName = c.String("firstname-attribute")
+ }
+ if c.IsSet("surname-attribute") {
+ config.AttributeSurname = c.String("surname-attribute")
+ }
+ if c.IsSet("email-attribute") {
+ config.AttributeMail = c.String("email-attribute")
+ }
+ if c.IsSet("attributes-in-bind") {
+ config.AttributesInBind = c.Bool("attributes-in-bind")
+ }
+ if c.IsSet("public-ssh-key-attribute") {
+ config.AttributeSSHPublicKey = c.String("public-ssh-key-attribute")
+ }
+ if c.IsSet("avatar-attribute") {
+ config.AttributeAvatar = c.String("avatar-attribute")
+ }
+ if c.IsSet("page-size") {
+ config.SearchPageSize = uint32(c.Uint("page-size"))
+ }
+ if c.IsSet("user-filter") {
+ config.Filter = c.String("user-filter")
+ }
+ if c.IsSet("admin-filter") {
+ config.AdminFilter = c.String("admin-filter")
+ }
+ if c.IsSet("restricted-filter") {
+ config.RestrictedFilter = c.String("restricted-filter")
+ }
+ if c.IsSet("allow-deactivate-all") {
+ config.AllowDeactivateAll = c.Bool("allow-deactivate-all")
+ }
+ if c.IsSet("skip-local-2fa") {
+ config.SkipLocalTwoFA = c.Bool("skip-local-2fa")
+ }
+ return nil
+}
+
+// findLdapSecurityProtocolByName finds security protocol by its name ignoring case.
+// It returns the value of the security protocol and if it was found.
+func findLdapSecurityProtocolByName(name string) (ldap.SecurityProtocol, bool) {
+ for i, n := range ldap.SecurityProtocolNames {
+ if strings.EqualFold(name, n) {
+ return i, true
+ }
+ }
+ return 0, false
+}
+
+// getAuthSource gets the login source by its id defined in the command line flags.
+// It returns an error if the id is not set, does not match any source or if the source is not of expected type.
+func (a *authService) getAuthSource(ctx context.Context, c *cli.Context, authType auth.Type) (*auth.Source, error) {
+ if err := argsSet(c, "id"); err != nil {
+ return nil, err
+ }
+
+ authSource, err := a.getAuthSourceByID(ctx, c.Int64("id"))
+ if err != nil {
+ return nil, err
+ }
+
+ if authSource.Type != authType {
+ return nil, fmt.Errorf("Invalid authentication type. expected: %s, actual: %s", authType.String(), authSource.Type.String())
+ }
+
+ return authSource, nil
+}
+
+// addLdapBindDn adds a new LDAP via Bind DN authentication source.
+func (a *authService) addLdapBindDn(c *cli.Context) error {
+ if err := argsSet(c, "name", "security-protocol", "host", "port", "user-search-base", "user-filter", "email-attribute"); err != nil {
+ return err
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := a.initDB(ctx); err != nil {
+ return err
+ }
+
+ authSource := &auth.Source{
+ Type: auth.LDAP,
+ IsActive: true, // active by default
+ Cfg: &ldap.Source{
+ Enabled: true, // always true
+ },
+ }
+
+ parseAuthSource(c, authSource)
+ if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
+ return err
+ }
+
+ return a.createAuthSource(ctx, authSource)
+}
+
+// updateLdapBindDn updates an existing LDAP (via Bind DN) authentication source.
+func (a *authService) updateLdapBindDn(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := a.initDB(ctx); err != nil {
+ return err
+ }
+
+ authSource, err := a.getAuthSource(ctx, c, auth.LDAP)
+ if err != nil {
+ return err
+ }
+
+ parseAuthSource(c, authSource)
+ if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
+ return err
+ }
+
+ return a.updateAuthSource(ctx, authSource)
+}
+
+// addLdapSimpleAuth adds a new LDAP (simple auth) authentication source.
+func (a *authService) addLdapSimpleAuth(c *cli.Context) error {
+ if err := argsSet(c, "name", "security-protocol", "host", "port", "user-dn", "user-filter", "email-attribute"); err != nil {
+ return err
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := a.initDB(ctx); err != nil {
+ return err
+ }
+
+ authSource := &auth.Source{
+ Type: auth.DLDAP,
+ IsActive: true, // active by default
+ Cfg: &ldap.Source{
+ Enabled: true, // always true
+ },
+ }
+
+ parseAuthSource(c, authSource)
+ if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
+ return err
+ }
+
+ return a.createAuthSource(ctx, authSource)
+}
+
+// updateLdapSimpleAuth updates an existing LDAP (simple auth) authentication source.
+func (a *authService) updateLdapSimpleAuth(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := a.initDB(ctx); err != nil {
+ return err
+ }
+
+ authSource, err := a.getAuthSource(ctx, c, auth.DLDAP)
+ if err != nil {
+ return err
+ }
+
+ parseAuthSource(c, authSource)
+ if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
+ return err
+ }
+
+ return a.updateAuthSource(ctx, authSource)
+}
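
findLdapSecurityProtocolByName is why "--security-protocol ldaps" and "--security-protocol LDAPS" are treated the same in the tests that follow: the lookup compares names with strings.EqualFold, a case-insensitive comparison that avoids lowercasing either string. A standalone sketch of that lookup; the protocol names below are illustrative stand-ins for ldap.SecurityProtocolNames:

// Sketch of a case-insensitive name lookup in the style of
// findLdapSecurityProtocolByName above. The map contents are illustrative.
package main

import (
	"fmt"
	"strings"
)

type SecurityProtocol int

var securityProtocolNames = map[SecurityProtocol]string{
	0: "unencrypted",
	1: "LDAPS",
	2: "StartTLS",
}

func findByName(name string) (SecurityProtocol, bool) {
	for proto, n := range securityProtocolNames {
		if strings.EqualFold(name, n) { // "ldaps" matches "LDAPS"
			return proto, true
		}
	}
	return 0, false
}

func main() {
	p, ok := findByName("ldaps")
	fmt.Println(p, ok) // 1 true
}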
diff --git a/cmd/admin_auth_ldap_test.go b/cmd/admin_auth_ldap_test.go
new file mode 100644
index 0000000..d5385d0
--- /dev/null
+++ b/cmd/admin_auth_ldap_test.go
@@ -0,0 +1,1326 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "testing"
+
+ "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth/source/ldap"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/urfave/cli/v2"
+)
+
+func TestAddLdapBindDn(t *testing.T) {
+ // Mock cli functions so they do not exit on error
+ osExiter := cli.OsExiter
+ defer func() { cli.OsExiter = osExiter }()
+ cli.OsExiter = func(code int) {}
+
+ // Test cases
+ cases := []struct {
+ args []string
+ source *auth.Source
+ errMsg string
+ }{
+ // case 0
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source full",
+ "--not-active",
+ "--security-protocol", "ldaps",
+ "--skip-tls-verify",
+ "--host", "ldap-bind-server full",
+ "--port", "9876",
+ "--user-search-base", "ou=Users,dc=full-domain-bind,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--restricted-filter", "(memberOf=cn=restricted-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--username-attribute", "uid-bind full",
+ "--firstname-attribute", "givenName-bind full",
+ "--surname-attribute", "sn-bind full",
+ "--email-attribute", "mail-bind full",
+ "--public-ssh-key-attribute", "publickey-bind full",
+ "--avatar-attribute", "avatar-bind full",
+ "--bind-dn", "cn=readonly,dc=full-domain-bind,dc=org",
+ "--bind-password", "secret-bind-full",
+ "--attributes-in-bind",
+ "--synchronize-users",
+ "--page-size", "99",
+ },
+ source: &auth.Source{
+ Type: auth.LDAP,
+ Name: "ldap (via Bind DN) source full",
+ IsActive: false,
+ IsSyncEnabled: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) source full",
+ Host: "ldap-bind-server full",
+ Port: 9876,
+ SecurityProtocol: ldap.SecurityProtocol(1),
+ SkipVerify: true,
+ BindDN: "cn=readonly,dc=full-domain-bind,dc=org",
+ BindPassword: "secret-bind-full",
+ UserBase: "ou=Users,dc=full-domain-bind,dc=org",
+ AttributeUsername: "uid-bind full",
+ AttributeName: "givenName-bind full",
+ AttributeSurname: "sn-bind full",
+ AttributeMail: "mail-bind full",
+ AttributesInBind: true,
+ AttributeSSHPublicKey: "publickey-bind full",
+ AttributeAvatar: "avatar-bind full",
+ SearchPageSize: 99,
+ Filter: "(memberOf=cn=user-group,ou=example,dc=full-domain-bind,dc=org)",
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=full-domain-bind,dc=org)",
+ RestrictedFilter: "(memberOf=cn=restricted-group,ou=example,dc=full-domain-bind,dc=org)",
+ Enabled: true,
+ },
+ },
+ },
+ // case 1
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source min",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-bind-server min",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=min-domain-bind,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=min-domain-bind,dc=org)",
+ "--email-attribute", "mail-bind min",
+ },
+ source: &auth.Source{
+ Type: auth.LDAP,
+ Name: "ldap (via Bind DN) source min",
+ IsActive: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) source min",
+ Host: "ldap-bind-server min",
+ Port: 1234,
+ SecurityProtocol: ldap.SecurityProtocol(0),
+ UserBase: "ou=Users,dc=min-domain-bind,dc=org",
+ AttributeMail: "mail-bind min",
+ Filter: "(memberOf=cn=user-group,ou=example,dc=min-domain-bind,dc=org)",
+ Enabled: true,
+ },
+ },
+ },
+ // case 2
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--security-protocol", "zzzzz",
+ "--host", "ldap-server",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ "--email-attribute", "mail",
+ },
+ errMsg: "Unknown security protocol name: zzzzz",
+ },
+ // case 3
+ {
+ args: []string{
+ "ldap-test",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ "--email-attribute", "mail",
+ },
+ errMsg: "name is not set",
+ },
+ // case 4
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--host", "ldap-server",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ "--email-attribute", "mail",
+ },
+ errMsg: "security-protocol is not set",
+ },
+ // case 5
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--security-protocol", "unencrypted",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ "--email-attribute", "mail",
+ },
+ errMsg: "host is not set",
+ },
+ // case 6
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ "--email-attribute", "mail",
+ },
+ errMsg: "port is not set",
+ },
+ // case 7
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--email-attribute", "mail",
+ },
+ errMsg: "user-filter is not set",
+ },
+ // case 8
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (via Bind DN) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "1234",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ },
+ errMsg: "email-attribute is not set",
+ },
+ }
+
+ for n, c := range cases {
+ // Mock functions.
+ var createdAuthSource *auth.Source
+ service := &authService{
+ initDB: func(context.Context) error {
+ return nil
+ },
+ createAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ createdAuthSource = authSource
+ return nil
+ },
+ updateAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ assert.FailNow(t, "case %d: should not call updateAuthSource", n)
+ return nil
+ },
+ getAuthSourceByID: func(ctx context.Context, id int64) (*auth.Source, error) {
+ assert.FailNow(t, "case %d: should not call getAuthSourceByID", n)
+ return nil, nil
+ },
+ }
+
+ // Create a copy of command to test
+ app := cli.NewApp()
+ app.Flags = microcmdAuthAddLdapBindDn.Flags
+ app.Action = service.addLdapBindDn
+
+ // Run it
+ err := app.Run(c.args)
+ if c.errMsg != "" {
+ assert.EqualError(t, err, c.errMsg, "case %d: error should match", n)
+ } else {
+ require.NoError(t, err, "case %d: should have no errors", n)
+ assert.Equal(t, c.source, createdAuthSource, "case %d: wrong authSource", n)
+ }
+ }
+}
+
+func TestAddLdapSimpleAuth(t *testing.T) {
+ // Mock cli functions so they do not exit on error
+ osExiter := cli.OsExiter
+ defer func() { cli.OsExiter = osExiter }()
+ cli.OsExiter = func(code int) {}
+
+ // Test cases
+ cases := []struct {
+ args []string
+ authSource *auth.Source
+ errMsg string
+ }{
+ // case 0
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source full",
+ "--not-active",
+ "--security-protocol", "starttls",
+ "--skip-tls-verify",
+ "--host", "ldap-simple-server full",
+ "--port", "987",
+ "--user-search-base", "ou=Users,dc=full-domain-simple,dc=org",
+ "--user-filter", "(&(objectClass=posixAccount)(full-simple-cn=%s))",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=full-domain-simple,dc=org)",
+ "--restricted-filter", "(memberOf=cn=restricted-group,ou=example,dc=full-domain-simple,dc=org)",
+ "--username-attribute", "uid-simple full",
+ "--firstname-attribute", "givenName-simple full",
+ "--surname-attribute", "sn-simple full",
+ "--email-attribute", "mail-simple full",
+ "--public-ssh-key-attribute", "publickey-simple full",
+ "--avatar-attribute", "avatar-simple full",
+ "--user-dn", "cn=%s,ou=Users,dc=full-domain-simple,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Name: "ldap (simple auth) source full",
+ IsActive: false,
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) source full",
+ Host: "ldap-simple-server full",
+ Port: 987,
+ SecurityProtocol: ldap.SecurityProtocol(2),
+ SkipVerify: true,
+ UserDN: "cn=%s,ou=Users,dc=full-domain-simple,dc=org",
+ UserBase: "ou=Users,dc=full-domain-simple,dc=org",
+ AttributeUsername: "uid-simple full",
+ AttributeName: "givenName-simple full",
+ AttributeSurname: "sn-simple full",
+ AttributeMail: "mail-simple full",
+ AttributeSSHPublicKey: "publickey-simple full",
+ AttributeAvatar: "avatar-simple full",
+ Filter: "(&(objectClass=posixAccount)(full-simple-cn=%s))",
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=full-domain-simple,dc=org)",
+ RestrictedFilter: "(memberOf=cn=restricted-group,ou=example,dc=full-domain-simple,dc=org)",
+ Enabled: true,
+ },
+ },
+ },
+ // case 1
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source min",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-simple-server min",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(min-simple-cn=%s))",
+ "--email-attribute", "mail-simple min",
+ "--user-dn", "cn=%s,ou=Users,dc=min-domain-simple,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Name: "ldap (simple auth) source min",
+ IsActive: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) source min",
+ Host: "ldap-simple-server min",
+ Port: 123,
+ SecurityProtocol: ldap.SecurityProtocol(0),
+ UserDN: "cn=%s,ou=Users,dc=min-domain-simple,dc=org",
+ AttributeMail: "mail-simple min",
+ Filter: "(&(objectClass=posixAccount)(min-simple-cn=%s))",
+ Enabled: true,
+ },
+ },
+ },
+ // case 2
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "zzzzz",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "Unknown security protocol name: zzzzz",
+ },
+ // case 3
+ {
+ args: []string{
+ "ldap-test",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "name is not set",
+ },
+ // case 4
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "security-protocol is not set",
+ },
+ // case 5
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "unencrypted",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "host is not set",
+ },
+ // case 6
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "port is not set",
+ },
+ // case 7
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--email-attribute", "mail",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "user-filter is not set",
+ },
+ // case 8
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ errMsg: "email-attribute is not set",
+ },
+ // case 9
+ {
+ args: []string{
+ "ldap-test",
+ "--name", "ldap (simple auth) source",
+ "--security-protocol", "unencrypted",
+ "--host", "ldap-server",
+ "--port", "123",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ "--email-attribute", "mail",
+ },
+ errMsg: "user-dn is not set",
+ },
+ }
+
+ for n, c := range cases {
+ // Mock functions.
+ var createdAuthSource *auth.Source
+ service := &authService{
+ initDB: func(context.Context) error {
+ return nil
+ },
+ createAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ createdAuthSource = authSource
+ return nil
+ },
+ updateAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ assert.FailNow(t, "case %d: should not call updateAuthSource", n)
+ return nil
+ },
+ getAuthSourceByID: func(ctx context.Context, id int64) (*auth.Source, error) {
+ assert.FailNow(t, "case %d: should not call getAuthSourceByID", n)
+ return nil, nil
+ },
+ }
+
+ // Create a copy of command to test
+ app := cli.NewApp()
+ app.Flags = microcmdAuthAddLdapSimpleAuth.Flags
+ app.Action = service.addLdapSimpleAuth
+
+ // Run it
+ err := app.Run(c.args)
+ if c.errMsg != "" {
+ assert.EqualError(t, err, c.errMsg, "case %d: error should match", n)
+ } else {
+ require.NoError(t, err, "case %d: should have no errors", n)
+ assert.Equal(t, c.authSource, createdAuthSource, "case %d: wrong authSource", n)
+ }
+ }
+}
+
+func TestUpdateLdapBindDn(t *testing.T) {
+ // Mock cli functions so they do not exit on error
+ osExiter := cli.OsExiter
+ defer func() { cli.OsExiter = osExiter }()
+ cli.OsExiter = func(code int) {}
+
+ // Test cases
+ cases := []struct {
+ args []string
+ id int64
+ existingAuthSource *auth.Source
+ authSource *auth.Source
+ errMsg string
+ }{
+ // case 0
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "23",
+ "--name", "ldap (via Bind DN) source full",
+ "--not-active",
+ "--security-protocol", "LDAPS",
+ "--skip-tls-verify",
+ "--host", "ldap-bind-server full",
+ "--port", "9876",
+ "--user-search-base", "ou=Users,dc=full-domain-bind,dc=org",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--restricted-filter", "(memberOf=cn=restricted-group,ou=example,dc=full-domain-bind,dc=org)",
+ "--username-attribute", "uid-bind full",
+ "--firstname-attribute", "givenName-bind full",
+ "--surname-attribute", "sn-bind full",
+ "--email-attribute", "mail-bind full",
+ "--public-ssh-key-attribute", "publickey-bind full",
+ "--avatar-attribute", "avatar-bind full",
+ "--bind-dn", "cn=readonly,dc=full-domain-bind,dc=org",
+ "--bind-password", "secret-bind-full",
+ "--synchronize-users",
+ "--page-size", "99",
+ },
+ id: 23,
+ existingAuthSource: &auth.Source{
+ Type: auth.LDAP,
+ IsActive: true,
+ Cfg: &ldap.Source{
+ Enabled: true,
+ },
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Name: "ldap (via Bind DN) source full",
+ IsActive: false,
+ IsSyncEnabled: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) source full",
+ Host: "ldap-bind-server full",
+ Port: 9876,
+ SecurityProtocol: ldap.SecurityProtocol(1),
+ SkipVerify: true,
+ BindDN: "cn=readonly,dc=full-domain-bind,dc=org",
+ BindPassword: "secret-bind-full",
+ UserBase: "ou=Users,dc=full-domain-bind,dc=org",
+ AttributeUsername: "uid-bind full",
+ AttributeName: "givenName-bind full",
+ AttributeSurname: "sn-bind full",
+ AttributeMail: "mail-bind full",
+ AttributesInBind: false,
+ AttributeSSHPublicKey: "publickey-bind full",
+ AttributeAvatar: "avatar-bind full",
+ SearchPageSize: 99,
+ Filter: "(memberOf=cn=user-group,ou=example,dc=full-domain-bind,dc=org)",
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=full-domain-bind,dc=org)",
+ RestrictedFilter: "(memberOf=cn=restricted-group,ou=example,dc=full-domain-bind,dc=org)",
+ Enabled: true,
+ },
+ },
+ },
+ // case 1
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{},
+ },
+ },
+ // case 2
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--name", "ldap (via Bind DN) source",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Name: "ldap (via Bind DN) source",
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) source",
+ },
+ },
+ },
+ // case 3
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--not-active",
+ },
+ existingAuthSource: &auth.Source{
+ Type: auth.LDAP,
+ IsActive: true,
+ Cfg: &ldap.Source{},
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ IsActive: false,
+ Cfg: &ldap.Source{},
+ },
+ },
+ // case 4
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--security-protocol", "LDAPS",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ SecurityProtocol: ldap.SecurityProtocol(1),
+ },
+ },
+ },
+ // case 5
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--skip-tls-verify",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ SkipVerify: true,
+ },
+ },
+ },
+ // case 6
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--host", "ldap-server",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ Host: "ldap-server",
+ },
+ },
+ },
+ // case 7
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--port", "389",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ Port: 389,
+ },
+ },
+ },
+ // case 8
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ UserBase: "ou=Users,dc=domain,dc=org",
+ },
+ },
+ },
+ // case 9
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--user-filter", "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ Filter: "(memberOf=cn=user-group,ou=example,dc=domain,dc=org)",
+ },
+ },
+ },
+ // case 10
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=domain,dc=org)",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=domain,dc=org)",
+ },
+ },
+ },
+ // case 11
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--username-attribute", "uid",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributeUsername: "uid",
+ },
+ },
+ },
+ // case 12
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--firstname-attribute", "givenName",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributeName: "givenName",
+ },
+ },
+ },
+ // case 13
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--surname-attribute", "sn",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributeSurname: "sn",
+ },
+ },
+ },
+ // case 14
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--email-attribute", "mail",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributeMail: "mail",
+ },
+ },
+ },
+ // case 15
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--attributes-in-bind",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributesInBind: true,
+ },
+ },
+ },
+ // case 16
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--public-ssh-key-attribute", "publickey",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ AttributeSSHPublicKey: "publickey",
+ },
+ },
+ },
+ // case 17
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--bind-dn", "cn=readonly,dc=domain,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ BindDN: "cn=readonly,dc=domain,dc=org",
+ },
+ },
+ },
+ // case 18
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--bind-password", "secret",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ BindPassword: "secret",
+ },
+ },
+ },
+ // case 19
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--synchronize-users",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ IsSyncEnabled: true,
+ Cfg: &ldap.Source{},
+ },
+ },
+ // case 20
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--page-size", "12",
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{
+ SearchPageSize: 12,
+ },
+ },
+ },
+ // case 21
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--security-protocol", "xxxxx",
+ },
+ errMsg: "Unknown security protocol name: xxxxx",
+ },
+ // case 22
+ {
+ args: []string{
+ "ldap-test",
+ },
+ errMsg: "id is not set",
+ },
+ // case 23
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ },
+ existingAuthSource: &auth.Source{
+ Type: auth.OAuth2,
+ Cfg: &ldap.Source{},
+ },
+ errMsg: "Invalid authentication type. expected: LDAP (via BindDN), actual: OAuth2",
+ },
+ // case 24
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "24",
+ "--name", "ldap (via Bind DN) flip 'active' and 'user sync' attributes",
+ "--active",
+ "--disable-synchronize-users",
+ },
+ id: 24,
+ existingAuthSource: &auth.Source{
+ Type: auth.LDAP,
+ IsActive: false,
+ IsSyncEnabled: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) flip 'active' and 'user sync' attributes",
+ Enabled: true,
+ },
+ },
+ authSource: &auth.Source{
+ Type: auth.LDAP,
+ Name: "ldap (via Bind DN) flip 'active' and 'user sync' attributes",
+ IsActive: true,
+ IsSyncEnabled: false,
+ Cfg: &ldap.Source{
+ Name: "ldap (via Bind DN) flip 'active' and 'user sync' attributes",
+ Enabled: true,
+ },
+ },
+ },
+ }
+
+ for n, c := range cases {
+ // Mock functions.
+ var updatedAuthSource *auth.Source
+ service := &authService{
+ initDB: func(context.Context) error {
+ return nil
+ },
+ createAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ assert.FailNow(t, "case %d: should not call createAuthSource", n)
+ return nil
+ },
+ updateAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ updatedAuthSource = authSource
+ return nil
+ },
+ getAuthSourceByID: func(ctx context.Context, id int64) (*auth.Source, error) {
+ if c.id != 0 {
+ assert.Equal(t, c.id, id, "case %d: wrong id", n)
+ }
+ if c.existingAuthSource != nil {
+ return c.existingAuthSource, nil
+ }
+ return &auth.Source{
+ Type: auth.LDAP,
+ Cfg: &ldap.Source{},
+ }, nil
+ },
+ }
+
+ // Create a copy of command to test
+ app := cli.NewApp()
+ app.Flags = microcmdAuthUpdateLdapBindDn.Flags
+ app.Action = service.updateLdapBindDn
+
+ // Run it
+ err := app.Run(c.args)
+ if c.errMsg != "" {
+ assert.EqualError(t, err, c.errMsg, "case %d: error should match", n)
+ } else {
+ require.NoError(t, err, "case %d: should have no errors", n)
+ assert.Equal(t, c.authSource, updatedAuthSource, "case %d: wrong authSource", n)
+ }
+ }
+}
+
+func TestUpdateLdapSimpleAuth(t *testing.T) {
+ // Mock cli functions so they do not exit on error
+ osExiter := cli.OsExiter
+ defer func() { cli.OsExiter = osExiter }()
+ cli.OsExiter = func(code int) {}
+
+ // Test cases
+ cases := []struct {
+ args []string
+ id int64
+ existingAuthSource *auth.Source
+ authSource *auth.Source
+ errMsg string
+ }{
+ // case 0
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "7",
+ "--name", "ldap (simple auth) source full",
+ "--not-active",
+ "--security-protocol", "starttls",
+ "--skip-tls-verify",
+ "--host", "ldap-simple-server full",
+ "--port", "987",
+ "--user-search-base", "ou=Users,dc=full-domain-simple,dc=org",
+ "--user-filter", "(&(objectClass=posixAccount)(full-simple-cn=%s))",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=full-domain-simple,dc=org)",
+ "--restricted-filter", "(memberOf=cn=restricted-group,ou=example,dc=full-domain-simple,dc=org)",
+ "--username-attribute", "uid-simple full",
+ "--firstname-attribute", "givenName-simple full",
+ "--surname-attribute", "sn-simple full",
+ "--email-attribute", "mail-simple full",
+ "--public-ssh-key-attribute", "publickey-simple full",
+ "--avatar-attribute", "avatar-simple full",
+ "--user-dn", "cn=%s,ou=Users,dc=full-domain-simple,dc=org",
+ },
+ id: 7,
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Name: "ldap (simple auth) source full",
+ IsActive: false,
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) source full",
+ Host: "ldap-simple-server full",
+ Port: 987,
+ SecurityProtocol: ldap.SecurityProtocol(2),
+ SkipVerify: true,
+ UserDN: "cn=%s,ou=Users,dc=full-domain-simple,dc=org",
+ UserBase: "ou=Users,dc=full-domain-simple,dc=org",
+ AttributeUsername: "uid-simple full",
+ AttributeName: "givenName-simple full",
+ AttributeSurname: "sn-simple full",
+ AttributeMail: "mail-simple full",
+ AttributeSSHPublicKey: "publickey-simple full",
+ AttributeAvatar: "avatar-simple full",
+ Filter: "(&(objectClass=posixAccount)(full-simple-cn=%s))",
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=full-domain-simple,dc=org)",
+ RestrictedFilter: "(memberOf=cn=restricted-group,ou=example,dc=full-domain-simple,dc=org)",
+ },
+ },
+ },
+ // case 1
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{},
+ },
+ },
+ // case 2
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--name", "ldap (simple auth) source",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Name: "ldap (simple auth) source",
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) source",
+ },
+ },
+ },
+ // case 3
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--not-active",
+ },
+ existingAuthSource: &auth.Source{
+ Type: auth.DLDAP,
+ IsActive: true,
+ Cfg: &ldap.Source{},
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ IsActive: false,
+ Cfg: &ldap.Source{},
+ },
+ },
+ // case 4
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--security-protocol", "starttls",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ SecurityProtocol: ldap.SecurityProtocol(2),
+ },
+ },
+ },
+ // case 5
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--skip-tls-verify",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ SkipVerify: true,
+ },
+ },
+ },
+ // case 6
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--host", "ldap-server",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ Host: "ldap-server",
+ },
+ },
+ },
+ // case 7
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--port", "987",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ Port: 987,
+ },
+ },
+ },
+ // case 8
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--user-search-base", "ou=Users,dc=domain,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ UserBase: "ou=Users,dc=domain,dc=org",
+ },
+ },
+ },
+ // case 9
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--user-filter", "(&(objectClass=posixAccount)(cn=%s))",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ Filter: "(&(objectClass=posixAccount)(cn=%s))",
+ },
+ },
+ },
+ // case 10
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--admin-filter", "(memberOf=cn=admin-group,ou=example,dc=domain,dc=org)",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AdminFilter: "(memberOf=cn=admin-group,ou=example,dc=domain,dc=org)",
+ },
+ },
+ },
+ // case 11
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--username-attribute", "uid",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AttributeUsername: "uid",
+ },
+ },
+ },
+ // case 12
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--firstname-attribute", "givenName",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AttributeName: "givenName",
+ },
+ },
+ },
+ // case 13
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--surname-attribute", "sn",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AttributeSurname: "sn",
+ },
+ },
+ },
+ // case 14
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--email-attribute", "mail",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AttributeMail: "mail",
+ },
+ },
+ },
+ // case 15
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--public-ssh-key-attribute", "publickey",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ AttributeSSHPublicKey: "publickey",
+ },
+ },
+ },
+ // case 16
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--user-dn", "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{
+ UserDN: "cn=%s,ou=Users,dc=domain,dc=org",
+ },
+ },
+ },
+ // case 17
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ "--security-protocol", "xxxxx",
+ },
+ errMsg: "Unknown security protocol name: xxxxx",
+ },
+ // case 18
+ {
+ args: []string{
+ "ldap-test",
+ },
+ errMsg: "id is not set",
+ },
+ // case 19
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "1",
+ },
+ existingAuthSource: &auth.Source{
+ Type: auth.PAM,
+ Cfg: &ldap.Source{},
+ },
+ errMsg: "Invalid authentication type. expected: LDAP (simple auth), actual: PAM",
+ },
+ // case 20
+ {
+ args: []string{
+ "ldap-test",
+ "--id", "20",
+ "--name", "ldap (simple auth) flip 'active' attribute",
+ "--active",
+ },
+ id: 20,
+ existingAuthSource: &auth.Source{
+ Type: auth.DLDAP,
+ IsActive: false,
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) flip 'active' attribute",
+ Enabled: true,
+ },
+ },
+ authSource: &auth.Source{
+ Type: auth.DLDAP,
+ Name: "ldap (simple auth) flip 'active' attribute",
+ IsActive: true,
+ Cfg: &ldap.Source{
+ Name: "ldap (simple auth) flip 'active' attribute",
+ Enabled: true,
+ },
+ },
+ },
+ }
+
+ for n, c := range cases {
+ // Mock functions.
+ var updatedAuthSource *auth.Source
+ service := &authService{
+ initDB: func(context.Context) error {
+ return nil
+ },
+ createAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ assert.FailNow(t, "case %d: should not call createAuthSource", n)
+ return nil
+ },
+ updateAuthSource: func(ctx context.Context, authSource *auth.Source) error {
+ updatedAuthSource = authSource
+ return nil
+ },
+ getAuthSourceByID: func(ctx context.Context, id int64) (*auth.Source, error) {
+ if c.id != 0 {
+ assert.Equal(t, c.id, id, "case %d: wrong id", n)
+ }
+ if c.existingAuthSource != nil {
+ return c.existingAuthSource, nil
+ }
+ return &auth.Source{
+ Type: auth.DLDAP,
+ Cfg: &ldap.Source{},
+ }, nil
+ },
+ }
+
+ // Create a copy of command to test
+ app := cli.NewApp()
+ app.Flags = microcmdAuthUpdateLdapSimpleAuth.Flags
+ app.Action = service.updateLdapSimpleAuth
+
+ // Run it
+ err := app.Run(c.args)
+ if c.errMsg != "" {
+ assert.EqualError(t, err, c.errMsg, "case %d: error should match", n)
+ } else {
+ require.NoError(t, err, "case %d: should have no errors", n)
+ assert.Equal(t, c.authSource, updatedAuthSource, "case %d: wrong authSource", n)
+ }
+ }
+}
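
Note how the tests above never open a database: because authService holds its dependencies as function fields, each table-driven case can inject no-op initDB/create/update/get hooks and simply capture whatever the command would have persisted. A minimal sketch of that seam outside the test framework; the types and names here are illustrative only:

// Sketch of the dependency-injection-by-function-fields pattern the tests
// above rely on. All names are invented; only the shape matches authService.
package main

import (
	"context"
	"fmt"
)

type source struct{ Name string }

type service struct {
	initDB       func(ctx context.Context) error
	createSource func(ctx context.Context, s *source) error
}

func (s *service) add(ctx context.Context, name string) error {
	if err := s.initDB(ctx); err != nil {
		return err
	}
	return s.createSource(ctx, &source{Name: name})
}

func main() {
	var created *source
	svc := &service{
		initDB:       func(context.Context) error { return nil }, // no real database
		createSource: func(_ context.Context, s *source) error { created = s; return nil },
	}
	if err := svc.add(context.Background(), "ldap (via Bind DN) source"); err != nil {
		panic(err)
	}
	fmt.Println("captured:", created.Name) // what the command would have persisted
}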
diff --git a/cmd/admin_auth_oauth.go b/cmd/admin_auth_oauth.go
new file mode 100644
index 0000000..8e6239a
--- /dev/null
+++ b/cmd/admin_auth_oauth.go
@@ -0,0 +1,299 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "net/url"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/services/auth/source/oauth2"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ oauthCLIFlags = []cli.Flag{
+ &cli.StringFlag{
+ Name: "name",
+ Value: "",
+ Usage: "Application Name",
+ },
+ &cli.StringFlag{
+ Name: "provider",
+ Value: "",
+ Usage: "OAuth2 Provider",
+ },
+ &cli.StringFlag{
+ Name: "key",
+ Value: "",
+ Usage: "Client ID (Key)",
+ },
+ &cli.StringFlag{
+ Name: "secret",
+ Value: "",
+ Usage: "Client Secret",
+ },
+ &cli.StringFlag{
+ Name: "auto-discover-url",
+ Value: "",
+ Usage: "OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider)",
+ },
+ &cli.StringFlag{
+ Name: "use-custom-urls",
+ Value: "false",
+ Usage: "Use custom URLs for GitLab/GitHub OAuth endpoints",
+ },
+ &cli.StringFlag{
+ Name: "custom-tenant-id",
+ Value: "",
+ Usage: "Use custom Tenant ID for OAuth endpoints",
+ },
+ &cli.StringFlag{
+ Name: "custom-auth-url",
+ Value: "",
+ Usage: "Use a custom Authorization URL (option for GitLab/GitHub)",
+ },
+ &cli.StringFlag{
+ Name: "custom-token-url",
+ Value: "",
+ Usage: "Use a custom Token URL (option for GitLab/GitHub)",
+ },
+ &cli.StringFlag{
+ Name: "custom-profile-url",
+ Value: "",
+ Usage: "Use a custom Profile URL (option for GitLab/GitHub)",
+ },
+ &cli.StringFlag{
+ Name: "custom-email-url",
+ Value: "",
+ Usage: "Use a custom Email URL (option for GitHub)",
+ },
+ &cli.StringFlag{
+ Name: "icon-url",
+ Value: "",
+ Usage: "Custom icon URL for OAuth2 login source",
+ },
+ &cli.BoolFlag{
+ Name: "skip-local-2fa",
+ Usage: "Set to true to skip local 2fa for users authenticated by this source",
+ },
+ &cli.StringSliceFlag{
+ Name: "scopes",
+ Value: nil,
+ Usage: "Scopes to request when to authenticate against this OAuth2 source",
+ },
+ &cli.StringFlag{
+ Name: "required-claim-name",
+ Value: "",
+ Usage: "Claim name that has to be set to allow users to login with this source",
+ },
+ &cli.StringFlag{
+ Name: "required-claim-value",
+ Value: "",
+ Usage: "Claim value that has to be set to allow users to login with this source",
+ },
+ &cli.StringFlag{
+ Name: "group-claim-name",
+ Value: "",
+ Usage: "Claim name providing group names for this source",
+ },
+ &cli.StringFlag{
+ Name: "admin-group",
+ Value: "",
+ Usage: "Group Claim value for administrator users",
+ },
+ &cli.StringFlag{
+ Name: "restricted-group",
+ Value: "",
+ Usage: "Group Claim value for restricted users",
+ },
+ &cli.StringFlag{
+ Name: "group-team-map",
+ Value: "",
+ Usage: "JSON mapping between groups and org teams",
+ },
+ &cli.BoolFlag{
+ Name: "group-team-map-removal",
+ Usage: "Activate automatic team membership removal depending on groups",
+ },
+ }
+
+ microcmdAuthAddOauth = &cli.Command{
+ Name: "add-oauth",
+ Usage: "Add new Oauth authentication source",
+ Action: runAddOauth,
+ Flags: oauthCLIFlags,
+ }
+
+ microcmdAuthUpdateOauth = &cli.Command{
+ Name: "update-oauth",
+ Usage: "Update existing Oauth authentication source",
+ Action: runUpdateOauth,
+ Flags: append(oauthCLIFlags[:1], append([]cli.Flag{idFlag}, oauthCLIFlags[1:]...)...),
+ }
+)
+
+func parseOAuth2Config(c *cli.Context) *oauth2.Source {
+ var customURLMapping *oauth2.CustomURLMapping
+ if c.IsSet("use-custom-urls") {
+ customURLMapping = &oauth2.CustomURLMapping{
+ TokenURL: c.String("custom-token-url"),
+ AuthURL: c.String("custom-auth-url"),
+ ProfileURL: c.String("custom-profile-url"),
+ EmailURL: c.String("custom-email-url"),
+ Tenant: c.String("custom-tenant-id"),
+ }
+ } else {
+ customURLMapping = nil
+ }
+ return &oauth2.Source{
+ Provider: c.String("provider"),
+ ClientID: c.String("key"),
+ ClientSecret: c.String("secret"),
+ OpenIDConnectAutoDiscoveryURL: c.String("auto-discover-url"),
+ CustomURLMapping: customURLMapping,
+ IconURL: c.String("icon-url"),
+ SkipLocalTwoFA: c.Bool("skip-local-2fa"),
+ Scopes: c.StringSlice("scopes"),
+ RequiredClaimName: c.String("required-claim-name"),
+ RequiredClaimValue: c.String("required-claim-value"),
+ GroupClaimName: c.String("group-claim-name"),
+ AdminGroup: c.String("admin-group"),
+ RestrictedGroup: c.String("restricted-group"),
+ GroupTeamMap: c.String("group-team-map"),
+ GroupTeamMapRemoval: c.Bool("group-team-map-removal"),
+ }
+}
+
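+// runAddOauth adds a new OAuth2 authentication source. An illustrative invocation
+// (placeholder values, assuming the usual "admin auth" parent commands) is:
+//
+// forgejo admin auth add-oauth --name keycloak --provider openidConnect \
+// --key my-client-id --secret my-client-secret \
+// --auto-discover-url https://idp.example.com/.well-known/openid-configuration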
+func runAddOauth(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ config := parseOAuth2Config(c)
+ if config.Provider == "openidConnect" {
+ discoveryURL, err := url.Parse(config.OpenIDConnectAutoDiscoveryURL)
+ if err != nil || (discoveryURL.Scheme != "http" && discoveryURL.Scheme != "https") {
+ return fmt.Errorf("invalid Auto Discovery URL: %s (this must be a valid URL starting with http:// or https://)", config.OpenIDConnectAutoDiscoveryURL)
+ }
+ }
+
+ return auth_model.CreateSource(ctx, &auth_model.Source{
+ Type: auth_model.OAuth2,
+ Name: c.String("name"),
+ IsActive: true,
+ Cfg: config,
+ })
+}
+
+func runUpdateOauth(c *cli.Context) error {
+ if !c.IsSet("id") {
+ return errors.New("--id flag is missing")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
+ if err != nil {
+ return err
+ }
+
+ oAuth2Config := source.Cfg.(*oauth2.Source)
+
+ if c.IsSet("name") {
+ source.Name = c.String("name")
+ }
+
+ if c.IsSet("provider") {
+ oAuth2Config.Provider = c.String("provider")
+ }
+
+ if c.IsSet("key") {
+ oAuth2Config.ClientID = c.String("key")
+ }
+
+ if c.IsSet("secret") {
+ oAuth2Config.ClientSecret = c.String("secret")
+ }
+
+ if c.IsSet("auto-discover-url") {
+ oAuth2Config.OpenIDConnectAutoDiscoveryURL = c.String("auto-discover-url")
+ }
+
+ if c.IsSet("icon-url") {
+ oAuth2Config.IconURL = c.String("icon-url")
+ }
+
+ if c.IsSet("scopes") {
+ oAuth2Config.Scopes = c.StringSlice("scopes")
+ }
+
+ if c.IsSet("required-claim-name") {
+ oAuth2Config.RequiredClaimName = c.String("required-claim-name")
+ }
+ if c.IsSet("required-claim-value") {
+ oAuth2Config.RequiredClaimValue = c.String("required-claim-value")
+ }
+
+ if c.IsSet("group-claim-name") {
+ oAuth2Config.GroupClaimName = c.String("group-claim-name")
+ }
+ if c.IsSet("admin-group") {
+ oAuth2Config.AdminGroup = c.String("admin-group")
+ }
+ if c.IsSet("restricted-group") {
+ oAuth2Config.RestrictedGroup = c.String("restricted-group")
+ }
+ if c.IsSet("group-team-map") {
+ oAuth2Config.GroupTeamMap = c.String("group-team-map")
+ }
+ if c.IsSet("group-team-map-removal") {
+ oAuth2Config.GroupTeamMapRemoval = c.Bool("group-team-map-removal")
+ }
+
+ // update custom URL mapping
+ customURLMapping := &oauth2.CustomURLMapping{}
+
+ if oAuth2Config.CustomURLMapping != nil {
+ customURLMapping.TokenURL = oAuth2Config.CustomURLMapping.TokenURL
+ customURLMapping.AuthURL = oAuth2Config.CustomURLMapping.AuthURL
+ customURLMapping.ProfileURL = oAuth2Config.CustomURLMapping.ProfileURL
+ customURLMapping.EmailURL = oAuth2Config.CustomURLMapping.EmailURL
+ customURLMapping.Tenant = oAuth2Config.CustomURLMapping.Tenant
+ }
+ if c.IsSet("use-custom-urls") && c.IsSet("custom-token-url") {
+ customURLMapping.TokenURL = c.String("custom-token-url")
+ }
+
+ if c.IsSet("use-custom-urls") && c.IsSet("custom-auth-url") {
+ customURLMapping.AuthURL = c.String("custom-auth-url")
+ }
+
+ if c.IsSet("use-custom-urls") && c.IsSet("custom-profile-url") {
+ customURLMapping.ProfileURL = c.String("custom-profile-url")
+ }
+
+ if c.IsSet("use-custom-urls") && c.IsSet("custom-email-url") {
+ customURLMapping.EmailURL = c.String("custom-email-url")
+ }
+
+ if c.IsSet("use-custom-urls") && c.IsSet("custom-tenant-id") {
+ customURLMapping.Tenant = c.String("custom-tenant-id")
+ }
+
+ oAuth2Config.CustomURLMapping = customURLMapping
+ source.Cfg = oAuth2Config
+
+ return auth_model.UpdateSource(ctx, source)
+}
diff --git a/cmd/admin_auth_stmp.go b/cmd/admin_auth_stmp.go
new file mode 100644
index 0000000..d724746
--- /dev/null
+++ b/cmd/admin_auth_stmp.go
@@ -0,0 +1,200 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "strings"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/auth/source/smtp"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ smtpCLIFlags = []cli.Flag{
+ &cli.StringFlag{
+ Name: "name",
+ Value: "",
+ Usage: "Application Name",
+ },
+ &cli.StringFlag{
+ Name: "auth-type",
+ Value: "PLAIN",
+ Usage: "SMTP authentication type (PLAIN/LOGIN/CRAM-MD5); defaults to PLAIN",
+ },
+ &cli.StringFlag{
+ Name: "host",
+ Value: "",
+ Usage: "SMTP Host",
+ },
+ &cli.IntFlag{
+ Name: "port",
+ Usage: "SMTP Port",
+ },
+ &cli.BoolFlag{
+ Name: "force-smtps",
+ Usage: "SMTPS is always used on port 465. Set this to force SMTPS on other ports.",
+ Value: true,
+ },
+ &cli.BoolFlag{
+ Name: "skip-verify",
+ Usage: "Skip TLS certificate verification.",
+ Value: true,
+ },
+ &cli.StringFlag{
+ Name: "helo-hostname",
+ Value: "",
+ Usage: "Hostname sent with HELO. Leave blank to send current hostname",
+ },
+ &cli.BoolFlag{
+ Name: "disable-helo",
+ Usage: "Disable SMTP helo.",
+ Value: true,
+ },
+ &cli.StringFlag{
+ Name: "allowed-domains",
+ Value: "",
+ Usage: "Leave empty to allow all domains. Separate multiple domains with a comma (',')",
+ },
+ &cli.BoolFlag{
+ Name: "skip-local-2fa",
+ Usage: "Skip local 2FA for users authenticated by this source.",
+ Value: true,
+ },
+ &cli.BoolFlag{
+ Name: "active",
+ Usage: "Activate this authentication source.",
+ Value: true,
+ },
+ }
+
+ microcmdAuthAddSMTP = &cli.Command{
+ Name: "add-smtp",
+ Usage: "Add new SMTP authentication source",
+ Action: runAddSMTP,
+ Flags: smtpCLIFlags,
+ }
+
+ microcmdAuthUpdateSMTP = &cli.Command{
+ Name: "update-smtp",
+ Usage: "Update existing SMTP authentication source",
+ Action: runUpdateSMTP,
+ Flags: append(smtpCLIFlags[:1], append([]cli.Flag{idFlag}, smtpCLIFlags[1:]...)...),
+ }
+)
+
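+// parseSMTPConfig copies only the flags that were explicitly set onto conf, so
+// update-smtp leaves any unset field untouched.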
+func parseSMTPConfig(c *cli.Context, conf *smtp.Source) error {
+ if c.IsSet("auth-type") {
+ validAuthTypes := []string{"PLAIN", "LOGIN", "CRAM-MD5"}
+ if !util.SliceContainsString(validAuthTypes, strings.ToUpper(c.String("auth-type"))) {
+ return errors.New("Auth must be one of PLAIN/LOGIN/CRAM-MD5")
+ }
+ conf.Auth = c.String("auth-type")
+ }
+ if c.IsSet("host") {
+ conf.Host = c.String("host")
+ }
+ if c.IsSet("port") {
+ conf.Port = c.Int("port")
+ }
+ if c.IsSet("allowed-domains") {
+ conf.AllowedDomains = c.String("allowed-domains")
+ }
+ if c.IsSet("force-smtps") {
+ conf.ForceSMTPS = c.Bool("force-smtps")
+ }
+ if c.IsSet("skip-verify") {
+ conf.SkipVerify = c.Bool("skip-verify")
+ }
+ if c.IsSet("helo-hostname") {
+ conf.HeloHostname = c.String("helo-hostname")
+ }
+ if c.IsSet("disable-helo") {
+ conf.DisableHelo = c.Bool("disable-helo")
+ }
+ if c.IsSet("skip-local-2fa") {
+ conf.SkipLocalTwoFA = c.Bool("skip-local-2fa")
+ }
+ return nil
+}
+
+func runAddSMTP(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ if !c.IsSet("name") || len(c.String("name")) == 0 {
+ return errors.New("name must be set")
+ }
+ if !c.IsSet("host") || len(c.String("host")) == 0 {
+ return errors.New("host must be set")
+ }
+ if !c.IsSet("port") {
+ return errors.New("port must be set")
+ }
+ active := true
+ if c.IsSet("active") {
+ active = c.Bool("active")
+ }
+
+ var smtpConfig smtp.Source
+ if err := parseSMTPConfig(c, &smtpConfig); err != nil {
+ return err
+ }
+
+ // If not set default to PLAIN
+ if len(smtpConfig.Auth) == 0 {
+ smtpConfig.Auth = "PLAIN"
+ }
+
+ return auth_model.CreateSource(ctx, &auth_model.Source{
+ Type: auth_model.SMTP,
+ Name: c.String("name"),
+ IsActive: active,
+ Cfg: &smtpConfig,
+ })
+}
+
+func runUpdateSMTP(c *cli.Context) error {
+ if !c.IsSet("id") {
+ return errors.New("--id flag is missing")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
+ if err != nil {
+ return err
+ }
+
+ smtpConfig := source.Cfg.(*smtp.Source)
+
+ if err := parseSMTPConfig(c, smtpConfig); err != nil {
+ return err
+ }
+
+ if c.IsSet("name") {
+ source.Name = c.String("name")
+ }
+
+ if c.IsSet("active") {
+ source.IsActive = c.Bool("active")
+ }
+
+ source.Cfg = smtpConfig
+
+ return auth_model.UpdateSource(ctx, source)
+}
diff --git a/cmd/admin_regenerate.go b/cmd/admin_regenerate.go
new file mode 100644
index 0000000..0db505f
--- /dev/null
+++ b/cmd/admin_regenerate.go
@@ -0,0 +1,46 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ "code.gitea.io/gitea/modules/graceful"
+ repo_service "code.gitea.io/gitea/services/repository"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ microcmdRegenHooks = &cli.Command{
+ Name: "hooks",
+ Usage: "Regenerate git-hooks",
+ Action: runRegenerateHooks,
+ }
+
+ microcmdRegenKeys = &cli.Command{
+ Name: "keys",
+ Usage: "Regenerate authorized_keys file",
+ Action: runRegenerateKeys,
+ }
+)
+
+func runRegenerateHooks(_ *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+ return repo_service.SyncRepositoryHooks(graceful.GetManager().ShutdownContext())
+}
+
+func runRegenerateKeys(_ *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+ return asymkey_model.RewriteAllPublicKeys(ctx)
+}
diff --git a/cmd/admin_user.go b/cmd/admin_user.go
new file mode 100644
index 0000000..967a6ed
--- /dev/null
+++ b/cmd/admin_user.go
@@ -0,0 +1,21 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "github.com/urfave/cli/v2"
+)
+
+var subcmdUser = &cli.Command{
+ Name: "user",
+ Usage: "Modify users",
+ Subcommands: []*cli.Command{
+ microcmdUserCreate,
+ microcmdUserList,
+ microcmdUserChangePassword,
+ microcmdUserDelete,
+ microcmdUserGenerateAccessToken,
+ microcmdUserMustChangePassword,
+ },
+}
diff --git a/cmd/admin_user_change_password.go b/cmd/admin_user_change_password.go
new file mode 100644
index 0000000..bd9063a
--- /dev/null
+++ b/cmd/admin_user_change_password.go
@@ -0,0 +1,80 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserChangePassword = &cli.Command{
+ Name: "change-password",
+ Usage: "Change a user's password",
+ Action: runChangePassword,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"u"},
+ Value: "",
+ Usage: "The user to change password for",
+ },
+ &cli.StringFlag{
+ Name: "password",
+ Aliases: []string{"p"},
+ Value: "",
+ Usage: "New password to set for user",
+ },
+ &cli.BoolFlag{
+ Name: "must-change-password",
+ Usage: "User must change password",
+ Value: true,
+ },
+ },
+}
+
+func runChangePassword(c *cli.Context) error {
+ if err := argsSet(c, "username", "password"); err != nil {
+ return err
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ user, err := user_model.GetUserByName(ctx, c.String("username"))
+ if err != nil {
+ return err
+ }
+
+ opts := &user_service.UpdateAuthOptions{
+ Password: optional.Some(c.String("password")),
+ MustChangePassword: optional.Some(c.Bool("must-change-password")),
+ }
+ if err := user_service.UpdateAuth(ctx, user, opts); err != nil {
+ switch {
+ case errors.Is(err, password.ErrMinLength):
+ return fmt.Errorf("password is not long enough, needs to be at least %d characters", setting.MinPasswordLength)
+ case errors.Is(err, password.ErrComplexity):
+ return errors.New("password does not meet complexity requirements")
+ case errors.Is(err, password.ErrIsPwned):
+ return errors.New("the password is in a list of stolen passwords previously exposed in public data breaches, please try again with a different password, to see more details: https://haveibeenpwned.com/Passwords")
+ default:
+ return err
+ }
+ }
+
+ fmt.Printf("%s's password has been successfully updated!\n", user.Name)
+ return nil
+}
diff --git a/cmd/admin_user_create.go b/cmd/admin_user_create.go
new file mode 100644
index 0000000..dfc484a
--- /dev/null
+++ b/cmd/admin_user_create.go
@@ -0,0 +1,175 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ "code.gitea.io/gitea/models/db"
+ user_model "code.gitea.io/gitea/models/user"
+ pwd "code.gitea.io/gitea/modules/auth/password"
+ "code.gitea.io/gitea/modules/optional"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserCreate = &cli.Command{
+ Name: "create",
+ Usage: "Create a new user in database",
+ Action: runCreateUser,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "name",
+ Usage: "Username. DEPRECATED: use --username instead",
+ },
+ &cli.StringFlag{
+ Name: "username",
+ Usage: "Username",
+ },
+ &cli.StringFlag{
+ Name: "password",
+ Usage: "User password",
+ },
+ &cli.StringFlag{
+ Name: "email",
+ Usage: "User email address",
+ },
+ &cli.BoolFlag{
+ Name: "admin",
+ Usage: "User is an admin",
+ },
+ &cli.BoolFlag{
+ Name: "random-password",
+ Usage: "Generate a random password for the user",
+ },
+ &cli.BoolFlag{
+ Name: "must-change-password",
+ Usage: "Set this option to false to prevent forcing the user to change their password after initial login",
+ Value: true,
+ DisableDefaultText: true,
+ },
+ &cli.IntFlag{
+ Name: "random-password-length",
+ Usage: "Length of the random password to be generated",
+ Value: 12,
+ },
+ &cli.BoolFlag{
+ Name: "access-token",
+ Usage: "Generate access token for the user",
+ },
+ &cli.BoolFlag{
+ Name: "restricted",
+ Usage: "Make a restricted user account",
+ },
+ },
+}
+
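+// runCreateUser creates a new user record. An illustrative invocation
+// (placeholder values, assuming the usual "admin user" parent commands) is:
+//
+// forgejo admin user create --username jane --email jane@example.com --random-password --admin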
+func runCreateUser(c *cli.Context) error {
+ if err := argsSet(c, "email"); err != nil {
+ return err
+ }
+
+ if c.IsSet("name") && c.IsSet("username") {
+ return errors.New("cannot set both --name and --username flags")
+ }
+ if !c.IsSet("name") && !c.IsSet("username") {
+ return errors.New("one of --name or --username flags must be set")
+ }
+
+ if c.IsSet("password") && c.IsSet("random-password") {
+ return errors.New("cannot set both --password and --random-password flags")
+ }
+
+ var username string
+ if c.IsSet("username") {
+ username = c.String("username")
+ } else {
+ username = c.String("name")
+ _, _ = fmt.Fprintf(c.App.ErrWriter, "--name flag is deprecated. Use --username instead.\n")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ var password string
+ if c.IsSet("password") {
+ password = c.String("password")
+ } else if c.IsSet("random-password") {
+ var err error
+ password, err = pwd.Generate(c.Int("random-password-length"))
+ if err != nil {
+ return err
+ }
+ fmt.Printf("generated random password is '%s'\n", password)
+ } else {
+ return errors.New("must set either the --password or the --random-password flag")
+ }
+
+ isAdmin := c.Bool("admin")
+ mustChangePassword := true // always default to true
+ if c.IsSet("must-change-password") {
+ // if the flag is set, use the value provided by the user
+ mustChangePassword = c.Bool("must-change-password")
+ } else {
+ // check whether there are users in the database
+ hasUserRecord, err := db.IsTableNotEmpty(&user_model.User{})
+ if err != nil {
+ return fmt.Errorf("IsTableNotEmpty: %w", err)
+ }
+ if !hasUserRecord {
+ // if this is the first admin being created, don't force to change password (keep the old behavior)
+ mustChangePassword = false
+ }
+ }
+
+ restricted := optional.None[bool]()
+
+ if c.IsSet("restricted") {
+ restricted = optional.Some(c.Bool("restricted"))
+ }
+
+ // default user visibility in app.ini
+ visibility := setting.Service.DefaultUserVisibilityMode
+
+ u := &user_model.User{
+ Name: username,
+ Email: c.String("email"),
+ Passwd: password,
+ IsAdmin: isAdmin,
+ MustChangePassword: mustChangePassword,
+ Visibility: visibility,
+ }
+
+ overwriteDefault := &user_model.CreateUserOverwriteOptions{
+ IsActive: optional.Some(true),
+ IsRestricted: restricted,
+ }
+
+ if err := user_model.CreateUser(ctx, u, overwriteDefault); err != nil {
+ return fmt.Errorf("CreateUser: %w", err)
+ }
+
+ if c.Bool("access-token") {
+ t := &auth_model.AccessToken{
+ Name: "gitea-admin",
+ UID: u.ID,
+ }
+
+ if err := auth_model.NewAccessToken(ctx, t); err != nil {
+ return err
+ }
+
+ fmt.Printf("Access token was successfully created... %s\n", t.Token)
+ }
+
+ fmt.Printf("New user '%s' has been successfully created!\n", username)
+ return nil
+}
diff --git a/cmd/admin_user_delete.go b/cmd/admin_user_delete.go
new file mode 100644
index 0000000..5205575
--- /dev/null
+++ b/cmd/admin_user_delete.go
@@ -0,0 +1,81 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/storage"
+ user_service "code.gitea.io/gitea/services/user"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserDelete = &cli.Command{
+ Name: "delete",
+ Usage: "Delete a specific user by ID, username or email",
+ Flags: []cli.Flag{
+ &cli.Int64Flag{
+ Name: "id",
+ Usage: "ID of the user to delete",
+ },
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"u"},
+ Usage: "Username of the user to delete",
+ },
+ &cli.StringFlag{
+ Name: "email",
+ Aliases: []string{"e"},
+ Usage: "Email of the user to delete",
+ },
+ &cli.BoolFlag{
+ Name: "purge",
+ Usage: "Purge user, all their repositories, organizations and comments",
+ },
+ },
+ Action: runDeleteUser,
+}
+
+func runDeleteUser(c *cli.Context) error {
+ if !c.IsSet("id") && !c.IsSet("username") && !c.IsSet("email") {
+ return errors.New("You must provide the id, username or email of a user to delete")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ if err := storage.Init(); err != nil {
+ return err
+ }
+
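+ // Look the user up by email first, then username, then ID; the checks below
+ // catch lookups that do not match the other provided flags.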
+ var err error
+ var user *user_model.User
+ if c.IsSet("email") {
+ user, err = user_model.GetUserByEmail(ctx, c.String("email"))
+ } else if c.IsSet("username") {
+ user, err = user_model.GetUserByName(ctx, c.String("username"))
+ } else {
+ user, err = user_model.GetUserByID(ctx, c.Int64("id"))
+ }
+ if err != nil {
+ return err
+ }
+ if c.IsSet("username") && user.LowerName != strings.ToLower(strings.TrimSpace(c.String("username"))) {
+ return fmt.Errorf("The user %s who has email %s does not match the provided username %s", user.Name, c.String("email"), c.String("username"))
+ }
+
+ if c.IsSet("id") && user.ID != c.Int64("id") {
+ return fmt.Errorf("The user %s does not match the provided id %d", user.Name, c.Int64("id"))
+ }
+
+ return user_service.DeleteUser(ctx, user, c.Bool("purge"))
+}
diff --git a/cmd/admin_user_generate_access_token.go b/cmd/admin_user_generate_access_token.go
new file mode 100644
index 0000000..6c2c104
--- /dev/null
+++ b/cmd/admin_user_generate_access_token.go
@@ -0,0 +1,94 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+
+ auth_model "code.gitea.io/gitea/models/auth"
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserGenerateAccessToken = &cli.Command{
+ Name: "generate-access-token",
+ Usage: "Generate an access token for a specific user",
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"u"},
+ Usage: "Username",
+ },
+ &cli.StringFlag{
+ Name: "token-name",
+ Aliases: []string{"t"},
+ Usage: "Token name",
+ Value: "gitea-admin",
+ },
+ &cli.BoolFlag{
+ Name: "raw",
+ Usage: "Display only the token value",
+ },
+ &cli.StringFlag{
+ Name: "scopes",
+ Value: "",
+ Usage: "Comma-separated list of scopes to apply to the access token",
+ },
+ },
+ Action: runGenerateAccessToken,
+}
+
+func runGenerateAccessToken(c *cli.Context) error {
+ if !c.IsSet("username") {
+ return errors.New("You must provide a username to generate a token for")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ user, err := user_model.GetUserByName(ctx, c.String("username"))
+ if err != nil {
+ return err
+ }
+
+ // construct token with name and user so we can make sure it is unique
+ t := &auth_model.AccessToken{
+ Name: c.String("token-name"),
+ UID: user.ID,
+ }
+
+ exist, err := auth_model.AccessTokenByNameExists(ctx, t)
+ if err != nil {
+ return err
+ }
+ if exist {
+ return errors.New("access token name has already been used")
+ }
+
+ // make sure the scopes are valid
+ accessTokenScope, err := auth_model.AccessTokenScope(c.String("scopes")).Normalize()
+ if err != nil {
+ return fmt.Errorf("invalid access token scope provided: %w", err)
+ }
+ t.Scope = accessTokenScope
+
+ // create the token
+ if err := auth_model.NewAccessToken(ctx, t); err != nil {
+ return err
+ }
+
+ if c.Bool("raw") {
+ fmt.Printf("%s\n", t.Token)
+ } else {
+ fmt.Printf("Access token was successfully created: %s\n", t.Token)
+ }
+
+ return nil
+}
diff --git a/cmd/admin_user_list.go b/cmd/admin_user_list.go
new file mode 100644
index 0000000..4c2b26d
--- /dev/null
+++ b/cmd/admin_user_list.go
@@ -0,0 +1,60 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ "os"
+ "text/tabwriter"
+
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserList = &cli.Command{
+ Name: "list",
+ Usage: "List users",
+ Action: runListUsers,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "admin",
+ Usage: "List only admin users",
+ },
+ },
+}
+
+func runListUsers(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ users, err := user_model.GetAllUsers(ctx)
+ if err != nil {
+ return err
+ }
+
+ w := tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)
+
+ if c.IsSet("admin") {
+ fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\n")
+ for _, u := range users {
+ if u.IsAdmin {
+ fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", u.ID, u.Name, u.Email, u.IsActive)
+ }
+ }
+ } else {
+ twofa := user_model.UserList(users).GetTwoFaStatus(ctx)
+ fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\tIsAdmin\t2FA\n")
+ for _, u := range users {
+ fmt.Fprintf(w, "%d\t%s\t%s\t%t\t%t\t%t\n", u.ID, u.Name, u.Email, u.IsActive, u.IsAdmin, twofa[u.ID])
+ }
+ }
+
+ w.Flush()
+ return nil
+}
diff --git a/cmd/admin_user_must_change_password.go b/cmd/admin_user_must_change_password.go
new file mode 100644
index 0000000..2794414
--- /dev/null
+++ b/cmd/admin_user_must_change_password.go
@@ -0,0 +1,60 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+
+ user_model "code.gitea.io/gitea/models/user"
+
+ "github.com/urfave/cli/v2"
+)
+
+var microcmdUserMustChangePassword = &cli.Command{
+ Name: "must-change-password",
+ Usage: "Set the must-change-password flag for the provided users or for all users",
+ Action: runMustChangePassword,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "all",
+ Aliases: []string{"A"},
+ Usage: "All users must change password, except those explicitly excluded with --exclude",
+ },
+ &cli.StringSliceFlag{
+ Name: "exclude",
+ Aliases: []string{"e"},
+ Usage: "Do not change the must-change-password flag for these users",
+ },
+ &cli.BoolFlag{
+ Name: "unset",
+ Usage: "Instead of setting the must-change-password flag, unset it",
+ },
+ },
+}
+
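+// runMustChangePassword treats the positional arguments as usernames; with --all
+// every user except those listed in --exclude is updated.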
+func runMustChangePassword(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if c.NArg() == 0 && !c.IsSet("all") {
+ return errors.New("either usernames or --all must be provided")
+ }
+
+ mustChangePassword := !c.Bool("unset")
+ all := c.Bool("all")
+ exclude := c.StringSlice("exclude")
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ n, err := user_model.SetMustChangePassword(ctx, all, mustChangePassword, c.Args().Slice(), exclude)
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("Updated %d users setting MustChangePassword to %t\n", n, mustChangePassword)
+ return nil
+}
diff --git a/cmd/cert.go b/cmd/cert.go
new file mode 100644
index 0000000..bf83af3
--- /dev/null
+++ b/cmd/cert.go
@@ -0,0 +1,196 @@
+// Copyright 2009 The Go Authors. All rights reserved.
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/pem"
+ "log"
+ "math/big"
+ "net"
+ "os"
+ "strings"
+ "time"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdCert represents the available cert sub-command.
+var CmdCert = &cli.Command{
+ Name: "cert",
+ Usage: "Generate self-signed certificate",
+ Description: `Generate a self-signed X.509 certificate for a TLS server.
+Outputs to 'cert.pem' and 'key.pem' and will overwrite existing files.`,
+ Action: runCert,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "host",
+ Value: "",
+ Usage: "Comma-separated hostnames and IPs to generate a certificate for",
+ },
+ &cli.StringFlag{
+ Name: "ecdsa-curve",
+ Value: "",
+ Usage: "ECDSA curve to use to generate a key. Valid values are P224, P256, P384, P521",
+ },
+ &cli.IntFlag{
+ Name: "rsa-bits",
+ Value: 3072,
+ Usage: "Size of RSA key to generate. Ignored if --ecdsa-curve is set",
+ },
+ &cli.StringFlag{
+ Name: "start-date",
+ Value: "",
+ Usage: "Creation date formatted as Jan 1 15:04:05 2011",
+ },
+ &cli.DurationFlag{
+ Name: "duration",
+ Value: 365 * 24 * time.Hour,
+ Usage: "Duration that certificate is valid for",
+ },
+ &cli.BoolFlag{
+ Name: "ca",
+ Usage: "Whether this certificate should be its own Certificate Authority",
+ },
+ },
+}
+
+func publicKey(priv any) any {
+ switch k := priv.(type) {
+ case *rsa.PrivateKey:
+ return &k.PublicKey
+ case *ecdsa.PrivateKey:
+ return &k.PublicKey
+ default:
+ return nil
+ }
+}
+
+func pemBlockForKey(priv any) *pem.Block {
+ switch k := priv.(type) {
+ case *rsa.PrivateKey:
+ return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
+ case *ecdsa.PrivateKey:
+ b, err := x509.MarshalECPrivateKey(k)
+ if err != nil {
+ log.Fatalf("Unable to marshal ECDSA private key: %v", err)
+ }
+ return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}
+ default:
+ return nil
+ }
+}
+
+func runCert(c *cli.Context) error {
+ if err := argsSet(c, "host"); err != nil {
+ return err
+ }
+
+ var priv any
+ var err error
+ switch c.String("ecdsa-curve") {
+ case "":
+ priv, err = rsa.GenerateKey(rand.Reader, c.Int("rsa-bits"))
+ case "P224":
+ priv, err = ecdsa.GenerateKey(elliptic.P224(), rand.Reader)
+ case "P256":
+ priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+ case "P384":
+ priv, err = ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
+ case "P521":
+ priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
+ default:
+ log.Fatalf("Unrecognized elliptic curve: %q", c.String("ecdsa-curve"))
+ }
+ if err != nil {
+ log.Fatalf("Failed to generate private key: %v", err)
+ }
+
+ var notBefore time.Time
+ if startDate := c.String("start-date"); startDate != "" {
+ notBefore, err = time.Parse("Jan 2 15:04:05 2006", startDate)
+ if err != nil {
+ log.Fatalf("Failed to parse creation date: %v", err)
+ }
+ } else {
+ notBefore = time.Now()
+ }
+
+ notAfter := notBefore.Add(c.Duration("duration"))
+
+ serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
+ serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
+ if err != nil {
+ log.Fatalf("Failed to generate serial number: %v", err)
+ }
+
+ template := x509.Certificate{
+ SerialNumber: serialNumber,
+ Subject: pkix.Name{
+ Organization: []string{"Acme Co"},
+ CommonName: "Forgejo",
+ },
+ NotBefore: notBefore,
+ NotAfter: notAfter,
+
+ KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
+ ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
+ BasicConstraintsValid: true,
+ }
+
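+ // Fill the certificate's subject alternative names: IP literals from --host
+ // become IPAddresses, everything else is added as a DNS name.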
+ hosts := strings.Split(c.String("host"), ",")
+ for _, h := range hosts {
+ if ip := net.ParseIP(h); ip != nil {
+ template.IPAddresses = append(template.IPAddresses, ip)
+ } else {
+ template.DNSNames = append(template.DNSNames, h)
+ }
+ }
+
+ if c.Bool("ca") {
+ template.IsCA = true
+ template.KeyUsage |= x509.KeyUsageCertSign
+ }
+
+ derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)
+ if err != nil {
+ log.Fatalf("Failed to create certificate: %v", err)
+ }
+
+ certOut, err := os.Create("cert.pem")
+ if err != nil {
+ log.Fatalf("Failed to open cert.pem for writing: %v", err)
+ }
+ err = pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes})
+ if err != nil {
+ log.Fatalf("Failed to encode certificate: %v", err)
+ }
+ err = certOut.Close()
+ if err != nil {
+ log.Fatalf("Failed to write cert: %v", err)
+ }
+ log.Println("Written cert.pem")
+
+ keyOut, err := os.OpenFile("key.pem", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
+ if err != nil {
+ log.Fatalf("Failed to open key.pem for writing: %v", err)
+ }
+ err = pem.Encode(keyOut, pemBlockForKey(priv))
+ if err != nil {
+ log.Fatalf("Failed to encode key: %v", err)
+ }
+ err = keyOut.Close()
+ if err != nil {
+ log.Fatalf("Failed to write key: %v", err)
+ }
+ log.Println("Written key.pem")
+ return nil
+}
diff --git a/cmd/cmd.go b/cmd/cmd.go
new file mode 100644
index 0000000..423dce2
--- /dev/null
+++ b/cmd/cmd.go
@@ -0,0 +1,135 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+// Package cmd provides subcommands to the gitea binary - such as "web" or
+// "admin".
+package cmd
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io"
+ "os"
+ "os/signal"
+ "strings"
+ "syscall"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/urfave/cli/v2"
+)
+
+// argsSet checks that all the required arguments are set. args is a list of
+// arguments that must be set in the passed Context.
+func argsSet(c *cli.Context, args ...string) error {
+ for _, a := range args {
+ if !c.IsSet(a) {
+ return errors.New(a + " is not set")
+ }
+
+ if util.IsEmptyString(c.String(a)) {
+ return errors.New(a + " is required")
+ }
+ }
+ return nil
+}
+
+// confirm waits for user input which confirms an action
+func confirm() (bool, error) {
+ var response string
+
+ _, err := fmt.Scanln(&response)
+ if err != nil {
+ return false, err
+ }
+
+ switch strings.ToLower(response) {
+ case "y", "yes":
+ return true, nil
+ case "n", "no":
+ return false, nil
+ default:
+ return false, errors.New(response + " isn't a correct confirmation string")
+ }
+}
+
+func initDB(ctx context.Context) error {
+ setting.MustInstalled()
+ setting.LoadDBSetting()
+ setting.InitSQLLoggersForCli(log.INFO)
+
+ if setting.Database.Type == "" {
+ log.Fatal(`Database settings are missing from the configuration file: %q.
+Ensure you are running in the correct environment or set the correct configuration file with -c.
+If this is the intended configuration file complete the [database] section.`, setting.CustomConf)
+ }
+ if err := db.InitEngine(ctx); err != nil {
+ return fmt.Errorf("unable to initialize the database using the configuration in %q. Error: %w", setting.CustomConf, err)
+ }
+ return nil
+}
+
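+// installSignals returns a context that is cancelled when SIGINT or SIGTERM is
+// received, so commands can shut down cleanly.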
+func installSignals() (context.Context, context.CancelFunc) {
+ ctx, cancel := context.WithCancel(context.Background())
+ go func() {
+ // install notify
+ signalChannel := make(chan os.Signal, 1)
+
+ signal.Notify(
+ signalChannel,
+ syscall.SIGINT,
+ syscall.SIGTERM,
+ )
+ select {
+ case <-signalChannel:
+ case <-ctx.Done():
+ }
+ cancel()
+ signal.Reset()
+ }()
+
+ return ctx, cancel
+}
+
+func setupConsoleLogger(level log.Level, colorize bool, out io.Writer) {
+ if out != os.Stdout && out != os.Stderr {
+ panic("setupConsoleLogger can only be used with os.Stdout or os.Stderr")
+ }
+
+ writeMode := log.WriterMode{
+ Level: level,
+ Colorize: colorize,
+ WriterOption: log.WriterConsoleOption{Stderr: out == os.Stderr},
+ }
+ writer := log.NewEventWriterConsole("console-default", writeMode)
+ log.GetManager().GetLogger(log.DEFAULT).ReplaceAllWriters(writer)
+}
+
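+// globalBool reports whether the named boolean flag is true anywhere in the
+// command lineage, including on parent commands.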
+func globalBool(c *cli.Context, name string) bool {
+ for _, ctx := range c.Lineage() {
+ if ctx.Bool(name) {
+ return true
+ }
+ }
+ return false
+}
+
+// PrepareConsoleLoggerLevel returns a function that sets the console logger level. By default INFO is used, but some sub-commands (for the git/ssh protocol) shouldn't output any log to stdout.
+// Any log that appears in the git stdout pipe breaks the git protocol, e.g. the client can't push and hangs forever.
+func PrepareConsoleLoggerLevel(defaultLevel log.Level) func(*cli.Context) error {
+ return func(c *cli.Context) error {
+ level := defaultLevel
+ if globalBool(c, "quiet") {
+ level = log.FATAL
+ }
+ if globalBool(c, "debug") || globalBool(c, "verbose") {
+ level = log.TRACE
+ }
+ log.SetConsoleLogger(log.DEFAULT, "console-default", level)
+ return nil
+ }
+}
diff --git a/cmd/docs.go b/cmd/docs.go
new file mode 100644
index 0000000..1dc0980
--- /dev/null
+++ b/cmd/docs.go
@@ -0,0 +1,65 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ "os"
+ "strings"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdDocs represents the available docs sub-command.
+var CmdDocs = &cli.Command{
+ Name: "docs",
+ Usage: "Output CLI documentation",
+ Description: "A command to output Forgejo's CLI documentation, optionally to a file.",
+ Action: runDocs,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "man",
+ Usage: "Output man pages instead",
+ },
+ &cli.StringFlag{
+ Name: "output",
+ Aliases: []string{"o"},
+ Usage: "Path to output to instead of stdout (will overwrite if exists)",
+ },
+ },
+}
+
+func runDocs(ctx *cli.Context) error {
+ docs, err := ctx.App.ToMarkdown()
+ if ctx.Bool("man") {
+ docs, err = ctx.App.ToMan()
+ }
+ if err != nil {
+ return err
+ }
+
+ if !ctx.Bool("man") {
+ // Clean up markdown. The following bug was fixed in v2, but is present in v1.
+ // It affects markdown output (even though the issue is referring to man pages)
+ // https://github.com/urfave/cli/issues/1040
+ firstHashtagIndex := strings.Index(docs, "#")
+
+ if firstHashtagIndex > 0 {
+ docs = docs[firstHashtagIndex:]
+ }
+ }
+
+ out := os.Stdout
+ if ctx.String("output") != "" {
+ fi, err := os.Create(ctx.String("output"))
+ if err != nil {
+ return err
+ }
+ defer fi.Close()
+ out = fi
+ }
+
+ _, err = fmt.Fprintln(out, docs)
+ return err
+}
diff --git a/cmd/doctor.go b/cmd/doctor.go
new file mode 100644
index 0000000..9957053
--- /dev/null
+++ b/cmd/doctor.go
@@ -0,0 +1,219 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ golog "log"
+ "os"
+ "path/filepath"
+ "strings"
+ "text/tabwriter"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/migrations"
+ migrate_base "code.gitea.io/gitea/models/migrations/base"
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/doctor"
+
+ "github.com/urfave/cli/v2"
+ "xorm.io/xorm"
+)
+
+// CmdDoctor represents the available doctor sub-command.
+var CmdDoctor = &cli.Command{
+ Name: "doctor",
+ Usage: "Diagnose and optionally fix problems, convert or re-create database tables",
+ Description: "A command to diagnose problems with the current Forgejo instance according to the given configuration. Some problems can optionally be fixed by modifying the database or data storage.",
+
+ Subcommands: []*cli.Command{
+ cmdDoctorCheck,
+ cmdRecreateTable,
+ cmdDoctorConvert,
+ },
+}
+
+var cmdDoctorCheck = &cli.Command{
+ Name: "check",
+ Usage: "Diagnose and optionally fix problems",
+ Description: "A command to diagnose problems with the current Forgejo instance according to the given configuration. Some problems can optionally be fixed by modifying the database or data storage.",
+ Action: runDoctorCheck,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "list",
+ Usage: "List the available checks",
+ },
+ &cli.BoolFlag{
+ Name: "default",
+ Usage: "Run the default checks (if neither --run nor --all is set, this is the default behaviour)",
+ },
+ &cli.StringSliceFlag{
+ Name: "run",
+ Usage: "Run the provided checks - (if --default is set, the default checks will also run)",
+ },
+ &cli.BoolFlag{
+ Name: "all",
+ Usage: "Run all the available checks",
+ },
+ &cli.BoolFlag{
+ Name: "fix",
+ Usage: "Automatically fix what we can",
+ },
+ &cli.StringFlag{
+ Name: "log-file",
+ Usage: `Name of the log file (no verbose log output by default). Set to "-" to output to stdout`,
+ },
+ &cli.BoolFlag{
+ Name: "color",
+ Aliases: []string{"H"},
+ Usage: "Use color in the output",
+ },
+ },
+}
+
+var cmdRecreateTable = &cli.Command{
+ Name: "recreate-table",
+ Usage: "Recreate tables from XORM definitions and copy the data.",
+ ArgsUsage: "[TABLE]... : (TABLEs to recreate - leave blank for all)",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ Usage: "Print SQL commands sent",
+ },
+ },
+ Description: `The database definitions Forgejo uses change across versions, sometimes changing default values and leaving old unused columns.
+
+This command will cause Xorm to recreate tables, copying over the data and deleting the old table.
+
+You should back-up your database before doing this and ensure that your database is up-to-date first.`,
+ Action: runRecreateTable,
+}
+
+func runRecreateTable(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ // Redirect the default golog to here
+ golog.SetFlags(0)
+ golog.SetPrefix("")
+ golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
+
+ debug := ctx.Bool("debug")
+ setting.MustInstalled()
+ setting.LoadDBSetting()
+
+ if debug {
+ setting.InitSQLLoggersForCli(log.DEBUG)
+ } else {
+ setting.InitSQLLoggersForCli(log.INFO)
+ }
+
+ setting.Database.LogSQL = debug
+ if err := db.InitEngine(stdCtx); err != nil {
+ fmt.Println(err)
+ fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
+ return nil
+ }
+
+ args := ctx.Args()
+ names := make([]string, 0, ctx.NArg())
+ for i := 0; i < ctx.NArg(); i++ {
+ names = append(names, args.Get(i))
+ }
+
+ beans, err := db.NamesToBean(names...)
+ if err != nil {
+ return err
+ }
+ recreateTables := migrate_base.RecreateTables(beans...)
+
+ return db.InitEngineWithMigration(stdCtx, func(x *xorm.Engine) error {
+ if err := migrations.EnsureUpToDate(x); err != nil {
+ return err
+ }
+ return recreateTables(x)
+ })
+}
+
+func setupDoctorDefaultLogger(ctx *cli.Context, colorize bool) {
+ // Silence the default loggers
+ setupConsoleLogger(log.FATAL, log.CanColorStderr, os.Stderr)
+
+ logFile := ctx.String("log-file")
+ if logFile == "" {
+ return // if no doctor log-file is set, do not show any log from default logger
+ } else if logFile == "-" {
+ setupConsoleLogger(log.TRACE, colorize, os.Stdout)
+ } else {
+ logFile, _ = filepath.Abs(logFile)
+ writeMode := log.WriterMode{Level: log.TRACE, WriterOption: log.WriterFileOption{FileName: logFile}}
+ writer, err := log.NewEventWriter("console-to-file", "file", writeMode)
+ if err != nil {
+ log.FallbackErrorf("unable to create file log writer: %v", err)
+ return
+ }
+ log.GetManager().GetLogger(log.DEFAULT).ReplaceAllWriters(writer)
+ }
+}
+
+func runDoctorCheck(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ colorize := log.CanColorStdout
+ if ctx.IsSet("color") {
+ colorize = ctx.Bool("color")
+ }
+
+ setupDoctorDefaultLogger(ctx, colorize)
+
+ // Finally redirect the default golang's log to here
+ golog.SetFlags(0)
+ golog.SetPrefix("")
+ golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
+
+ if ctx.IsSet("list") {
+ w := tabwriter.NewWriter(os.Stdout, 0, 8, 1, '\t', 0)
+ _, _ = w.Write([]byte("Default\tName\tTitle\n"))
+ doctor.SortChecks(doctor.Checks)
+ for _, check := range doctor.Checks {
+ if check.IsDefault {
+ _, _ = w.Write([]byte{'*'})
+ }
+ _, _ = w.Write([]byte{'\t'})
+ _, _ = w.Write([]byte(check.Name))
+ _, _ = w.Write([]byte{'\t'})
+ _, _ = w.Write([]byte(check.Title))
+ _, _ = w.Write([]byte{'\n'})
+ }
+ return w.Flush()
+ }
+
+ var checks []*doctor.Check
+ if ctx.Bool("all") {
+ checks = make([]*doctor.Check, len(doctor.Checks))
+ copy(checks, doctor.Checks)
+ } else if ctx.IsSet("run") {
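+ // Run only the checks named with --run (plus the defaults when --default is
+ // also set); any name left in runNamesSet afterwards did not match a known check.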
+ addDefault := ctx.Bool("default")
+ runNamesSet := container.SetOf(ctx.StringSlice("run")...)
+ for _, check := range doctor.Checks {
+ if (addDefault && check.IsDefault) || runNamesSet.Contains(check.Name) {
+ checks = append(checks, check)
+ runNamesSet.Remove(check.Name)
+ }
+ }
+ if len(runNamesSet) > 0 {
+ return fmt.Errorf("unknown checks: %q", strings.Join(runNamesSet.Values(), ","))
+ }
+ } else {
+ for _, check := range doctor.Checks {
+ if check.IsDefault {
+ checks = append(checks, check)
+ }
+ }
+ }
+ return doctor.RunChecks(stdCtx, colorize, ctx.Bool("fix"), checks)
+}
diff --git a/cmd/doctor_convert.go b/cmd/doctor_convert.go
new file mode 100644
index 0000000..190b2fc
--- /dev/null
+++ b/cmd/doctor_convert.go
@@ -0,0 +1,49 @@
+// Copyright 2019 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+// cmdDoctorConvert represents the available convert sub-command.
+var cmdDoctorConvert = &cli.Command{
+ Name: "convert",
+ Usage: "Convert the database",
+ Description: "A command to convert an existing MySQL database from utf8 to utf8mb4",
+ Action: runDoctorConvert,
+}
+
+func runDoctorConvert(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(stdCtx); err != nil {
+ return err
+ }
+
+ log.Info("AppPath: %s", setting.AppPath)
+ log.Info("AppWorkPath: %s", setting.AppWorkPath)
+ log.Info("Custom path: %s", setting.CustomPath)
+ log.Info("Log path: %s", setting.Log.RootPath)
+ log.Info("Configuration file: %s", setting.CustomConf)
+
+ if setting.Database.Type.IsMySQL() {
+ if err := db.ConvertDatabaseTable(); err != nil {
+ log.Fatal("Failed to convert database & table: %v", err)
+ return err
+ }
+ fmt.Println("Converted successfully, please confirm your database's character set is now utf8mb4")
+ } else {
+ fmt.Println("This command can only be used with a MySQL database")
+ }
+
+ return nil
+}
diff --git a/cmd/doctor_test.go b/cmd/doctor_test.go
new file mode 100644
index 0000000..e6daae1
--- /dev/null
+++ b/cmd/doctor_test.go
@@ -0,0 +1,33 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "testing"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/services/doctor"
+
+ "github.com/stretchr/testify/require"
+ "github.com/urfave/cli/v2"
+)
+
+func TestDoctorRun(t *testing.T) {
+ doctor.Register(&doctor.Check{
+ Title: "Test Check",
+ Name: "test-check",
+ Run: func(ctx context.Context, logger log.Logger, autofix bool) error { return nil },
+
+ SkipDatabaseInitialization: true,
+ })
+ app := cli.NewApp()
+ app.Commands = []*cli.Command{cmdDoctorCheck}
+ err := app.Run([]string{"./gitea", "check", "--run", "test-check"})
+ require.NoError(t, err)
+ err = app.Run([]string{"./gitea", "check", "--run", "no-such"})
+ require.ErrorContains(t, err, `unknown checks: "no-such"`)
+ err = app.Run([]string{"./gitea", "check", "--run", "test-check,no-such"})
+ require.ErrorContains(t, err, `unknown checks: "no-such"`)
+}
diff --git a/cmd/dump.go b/cmd/dump.go
new file mode 100644
index 0000000..5c64199
--- /dev/null
+++ b/cmd/dump.go
@@ -0,0 +1,492 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/util"
+
+ "code.forgejo.org/go-chi/session"
+ "github.com/mholt/archiver/v3"
+ "github.com/urfave/cli/v2"
+)
+
+func addReader(w archiver.Writer, r io.ReadCloser, info os.FileInfo, customName string, verbose bool) error {
+ if verbose {
+ log.Info("Adding file %s", customName)
+ }
+
+ return w.Write(archiver.File{
+ FileInfo: archiver.FileInfo{
+ FileInfo: info,
+ CustomName: customName,
+ },
+ ReadCloser: r,
+ })
+}
+
+func addFile(w archiver.Writer, filePath, absPath string, verbose bool) error {
+ file, err := os.Open(absPath)
+ if err != nil {
+ return err
+ }
+ defer file.Close()
+ fileInfo, err := file.Stat()
+ if err != nil {
+ return err
+ }
+
+ return addReader(w, file, fileInfo, filePath, verbose)
+}
+
+func isSubdir(upper, lower string) (bool, error) {
+ if relPath, err := filepath.Rel(upper, lower); err != nil {
+ return false, err
+ } else if relPath == "." || !strings.HasPrefix(relPath, ".") {
+ return true, nil
+ }
+ return false, nil
+}
+
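+// outputType implements cli.Generic so that the --type flag only accepts one of
+// the listed archive formats.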
+type outputType struct {
+ Enum []string
+ Default string
+ selected string
+}
+
+func (o outputType) Join() string {
+ return strings.Join(o.Enum, ", ")
+}
+
+func (o *outputType) Set(value string) error {
+ for _, enum := range o.Enum {
+ if enum == value {
+ o.selected = value
+ return nil
+ }
+ }
+
+ return fmt.Errorf("allowed values are %s", o.Join())
+}
+
+func (o outputType) String() string {
+ if o.selected == "" {
+ return o.Default
+ }
+ return o.selected
+}
+
+var outputTypeEnum = &outputType{
+ Enum: []string{"zip", "tar", "tar.sz", "tar.gz", "tar.xz", "tar.bz2", "tar.br", "tar.lz4", "tar.zst"},
+ Default: "zip",
+}
+
+// CmdDump represents the available dump sub-command.
+var CmdDump = &cli.Command{
+ Name: "dump",
+ Usage: "Dump Forgejo files and database",
+ Description: `Dump compresses all related files and database into zip file.
+It can be used for backup and capture Forgejo server image to send to maintainer`,
+ Action: runDump,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "file",
+ Aliases: []string{"f"},
+ Value: fmt.Sprintf("forgejo-dump-%d.zip", time.Now().Unix()),
+ Usage: "Name of the dump file that will be created. Supply '-' for stdout. See --type for available formats.",
+ },
+ &cli.BoolFlag{
+ Name: "verbose",
+ Aliases: []string{"V"},
+ Usage: "Show process details",
+ },
+ &cli.BoolFlag{
+ Name: "quiet",
+ Aliases: []string{"q"},
+ Usage: "Only display warnings and errors",
+ },
+ &cli.StringFlag{
+ Name: "tempdir",
+ Aliases: []string{"t"},
+ Value: os.TempDir(),
+ Usage: "Temporary dir path",
+ },
+ &cli.StringFlag{
+ Name: "database",
+ Aliases: []string{"d"},
+ Usage: "Specify the database SQL syntax: sqlite3, mysql, postgres",
+ },
+ &cli.BoolFlag{
+ Name: "skip-repository",
+ Aliases: []string{"R"},
+ Usage: "Skip the repository dumping",
+ },
+ &cli.BoolFlag{
+ Name: "skip-log",
+ Aliases: []string{"L"},
+ Usage: "Skip the log dumping",
+ },
+ &cli.BoolFlag{
+ Name: "skip-custom-dir",
+ Usage: "Skip custom directory",
+ },
+ &cli.BoolFlag{
+ Name: "skip-lfs-data",
+ Usage: "Skip LFS data",
+ },
+ &cli.BoolFlag{
+ Name: "skip-attachment-data",
+ Usage: "Skip attachment data",
+ },
+ &cli.BoolFlag{
+ Name: "skip-package-data",
+ Usage: "Skip package data",
+ },
+ &cli.BoolFlag{
+ Name: "skip-index",
+ Usage: "Skip bleve index data",
+ },
+ &cli.GenericFlag{
+ Name: "type",
+ Value: outputTypeEnum,
+ Usage: fmt.Sprintf("Dump output format: %s", outputTypeEnum.Join()),
+ },
+ },
+}
+
+func fatal(format string, args ...any) {
+ fmt.Fprintf(os.Stderr, format+"\n", args...)
+ log.Fatal(format, args...)
+}
+
+func runDump(ctx *cli.Context) error {
+ var file *os.File
+ fileName := ctx.String("file")
+ outType := ctx.String("type")
+ if fileName == "-" {
+ file = os.Stdout
+ setupConsoleLogger(log.FATAL, log.CanColorStderr, os.Stderr)
+ } else {
+ for _, suffix := range outputTypeEnum.Enum {
+ if strings.HasSuffix(fileName, "."+suffix) {
+ fileName = strings.TrimSuffix(fileName, "."+suffix)
+ break
+ }
+ }
+ fileName += "." + outType
+ }
+ setting.MustInstalled()
+
+ // make sure we are logging to the console no matter what the configuration tells us to do
+ // FIXME: don't use CfgProvider directly
+ if _, err := setting.CfgProvider.Section("log").NewKey("MODE", "console"); err != nil {
+ fatal("Setting logging mode to console failed: %v", err)
+ }
+ if _, err := setting.CfgProvider.Section("log.console").NewKey("STDERR", "true"); err != nil {
+ fatal("Setting console logger to stderr failed: %v", err)
+ }
+
+ // Set loglevel to Warn if quiet-mode is requested
+ if ctx.Bool("quiet") {
+ if _, err := setting.CfgProvider.Section("log.console").NewKey("LEVEL", "Warn"); err != nil {
+ fatal("Setting console log-level failed: %v", err)
+ }
+ }
+
+ if !setting.InstallLock {
+ log.Error("Is '%s' really the right config path?\n", setting.CustomConf)
+ return fmt.Errorf("forgejo is not initialized")
+ }
+ setting.LoadSettings() // cannot access session settings otherwise
+
+ verbose := ctx.Bool("verbose")
+ if verbose && ctx.Bool("quiet") {
+ return fmt.Errorf("--quiet and --verbose cannot both be set")
+ }
+
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ err := db.InitEngine(stdCtx)
+ if err != nil {
+ return err
+ }
+
+ if err := storage.Init(); err != nil {
+ return err
+ }
+
+ if file == nil {
+ file, err = os.Create(fileName)
+ if err != nil {
+ fatal("Unable to open %s: %v", fileName, err)
+ }
+ }
+ defer file.Close()
+
+ absFileName, err := filepath.Abs(fileName)
+ if err != nil {
+ return err
+ }
+
+ var iface any
+ if fileName == "-" {
+ iface, err = archiver.ByExtension(fmt.Sprintf(".%s", outType))
+ } else {
+ iface, err = archiver.ByExtension(fileName)
+ }
+ if err != nil {
+ fatal("Unable to get archiver for extension: %v", err)
+ }
+
+ w, _ := iface.(archiver.Writer)
+ if err := w.Create(file); err != nil {
+ fatal("Creating archiver.Writer failed: %v", err)
+ }
+ defer w.Close()
+
+ if ctx.IsSet("skip-repository") && ctx.Bool("skip-repository") {
+ log.Info("Skip dumping local repositories")
+ } else {
+ log.Info("Dumping local repositories... %s", setting.RepoRootPath)
+ if err := addRecursiveExclude(w, "repos", setting.RepoRootPath, []string{absFileName}, verbose); err != nil {
+ fatal("Failed to include repositories: %v", err)
+ }
+
+ if ctx.IsSet("skip-lfs-data") && ctx.Bool("skip-lfs-data") {
+ log.Info("Skip dumping LFS data")
+ } else if !setting.LFS.StartServer {
+ log.Info("LFS isn't enabled. Skip dumping LFS data")
+ } else if err := storage.LFS.IterateObjects("", func(objPath string, object storage.Object) error {
+ info, err := object.Stat()
+ if err != nil {
+ return err
+ }
+
+ return addReader(w, object, info, path.Join("data", "lfs", objPath), verbose)
+ }); err != nil {
+ fatal("Failed to dump LFS objects: %v", err)
+ }
+ }
+
+ tmpDir := ctx.String("tempdir")
+ if _, err := os.Stat(tmpDir); os.IsNotExist(err) {
+ fatal("Path does not exist: %s", tmpDir)
+ }
+
+ dbDump, err := os.CreateTemp(tmpDir, "forgejo-db.sql")
+ if err != nil {
+ fatal("Failed to create tmp file: %v", err)
+ }
+ defer func() {
+ _ = dbDump.Close()
+ if err := util.Remove(dbDump.Name()); err != nil {
+ log.Warn("Unable to remove temporary file: %s: Error: %v", dbDump.Name(), err)
+ }
+ }()
+
+ targetDBType := ctx.String("database")
+ if len(targetDBType) > 0 && targetDBType != setting.Database.Type.String() {
+ log.Info("Dumping database %s => %s...", setting.Database.Type, targetDBType)
+ } else {
+ log.Info("Dumping database...")
+ }
+
+ if err := db.DumpDatabase(dbDump.Name(), targetDBType); err != nil {
+ fatal("Failed to dump database: %v", err)
+ }
+
+ if err := addFile(w, "forgejo-db.sql", dbDump.Name(), verbose); err != nil {
+ fatal("Failed to include forgejo-db.sql: %v", err)
+ }
+
+ if len(setting.CustomConf) > 0 {
+ log.Info("Adding custom configuration file from %s", setting.CustomConf)
+ if err := addFile(w, "app.ini", setting.CustomConf, verbose); err != nil {
+ fatal("Failed to include specified app.ini: %v", err)
+ }
+ }
+
+ if ctx.IsSet("skip-custom-dir") && ctx.Bool("skip-custom-dir") {
+ log.Info("Skipping custom directory")
+ } else {
+ customDir, err := os.Stat(setting.CustomPath)
+ if err == nil && customDir.IsDir() {
+ if is, _ := isSubdir(setting.AppDataPath, setting.CustomPath); !is {
+ if err := addRecursiveExclude(w, "custom", setting.CustomPath, []string{absFileName}, verbose); err != nil {
+ fatal("Failed to include custom: %v", err)
+ }
+ } else {
+ log.Info("Custom dir %s is inside data dir %s, skipped", setting.CustomPath, setting.AppDataPath)
+ }
+ } else {
+ log.Info("Custom dir %s doesn't exist, skipped", setting.CustomPath)
+ }
+ }
+
+ isExist, err := util.IsExist(setting.AppDataPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", setting.AppDataPath, err)
+ }
+ if isExist {
+ log.Info("Packing data directory...%s", setting.AppDataPath)
+
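+ // Exclude everything that is dumped separately (repositories, LFS, attachments,
+ // packages, logs), the dump file itself, file-based session data and,
+ // with --skip-index, the bleve indexes.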
+ var excludes []string
+ if setting.SessionConfig.OriginalProvider == "file" {
+ var opts session.Options
+ if err = json.Unmarshal([]byte(setting.SessionConfig.ProviderConfig), &opts); err != nil {
+ return err
+ }
+ excludes = append(excludes, opts.ProviderConfig)
+ }
+
+ if ctx.IsSet("skip-index") && ctx.Bool("skip-index") {
+ excludes = append(excludes, setting.Indexer.RepoPath)
+ excludes = append(excludes, setting.Indexer.IssuePath)
+ }
+
+ excludes = append(excludes, setting.RepoRootPath)
+ excludes = append(excludes, setting.LFS.Storage.Path)
+ excludes = append(excludes, setting.Attachment.Storage.Path)
+ excludes = append(excludes, setting.Packages.Storage.Path)
+ excludes = append(excludes, setting.Log.RootPath)
+ excludes = append(excludes, absFileName)
+ if err := addRecursiveExclude(w, "data", setting.AppDataPath, excludes, verbose); err != nil {
+ fatal("Failed to include data directory: %v", err)
+ }
+ }
+
+ if ctx.IsSet("skip-attachment-data") && ctx.Bool("skip-attachment-data") {
+ log.Info("Skip dumping attachment data")
+ } else if err := storage.Attachments.IterateObjects("", func(objPath string, object storage.Object) error {
+ info, err := object.Stat()
+ if err != nil {
+ return err
+ }
+
+ return addReader(w, object, info, path.Join("data", "attachments", objPath), verbose)
+ }); err != nil {
+ fatal("Failed to dump attachments: %v", err)
+ }
+
+ if ctx.IsSet("skip-package-data") && ctx.Bool("skip-package-data") {
+ log.Info("Skip dumping package data")
+ } else if !setting.Packages.Enabled {
+ log.Info("Package registry isn't enabled. Skip dumping package data")
+ } else if err := storage.Packages.IterateObjects("", func(objPath string, object storage.Object) error {
+ info, err := object.Stat()
+ if err != nil {
+ return err
+ }
+
+ return addReader(w, object, info, path.Join("data", "packages", objPath), verbose)
+ }); err != nil {
+ fatal("Failed to dump packages: %v", err)
+ }
+
+ // Intentionally do not check whether LogRootPath exists before handling
+ // --skip-log, so that it is clear the log dump was skipped whether or not the
+ // directory has been initialized yet.
+ if ctx.IsSet("skip-log") && ctx.Bool("skip-log") {
+ log.Info("Skip dumping log files")
+ } else {
+ isExist, err := util.IsExist(setting.Log.RootPath)
+ if err != nil {
+ log.Error("Unable to check if %s exists. Error: %v", setting.Log.RootPath, err)
+ }
+ if isExist {
+ if err := addRecursiveExclude(w, "log", setting.Log.RootPath, []string{absFileName}, verbose); err != nil {
+ fatal("Failed to include log: %v", err)
+ }
+ }
+ }
+
+ if fileName != "-" {
+ if err = w.Close(); err != nil {
+ _ = util.Remove(fileName)
+ fatal("Failed to save %s: %v", fileName, err)
+ }
+
+ if err := os.Chmod(fileName, 0o600); err != nil {
+ log.Info("Can't change file access permissions mask to 0600: %v", err)
+ }
+ }
+
+ if fileName != "-" {
+ log.Info("Finish dumping in file %s", fileName)
+ } else {
+ log.Info("Finish dumping to stdout")
+ }
+
+ return nil
+}
+
+// addRecursiveExclude recursively adds the contents of absPath to the archive under insidePath, skipping every path listed in excludeAbsPath
+func addRecursiveExclude(w archiver.Writer, insidePath, absPath string, excludeAbsPath []string, verbose bool) error {
+ absPath, err := filepath.Abs(absPath)
+ if err != nil {
+ return err
+ }
+ dir, err := os.Open(absPath)
+ if err != nil {
+ return err
+ }
+ defer dir.Close()
+
+ files, err := dir.Readdir(0)
+ if err != nil {
+ return err
+ }
+ for _, file := range files {
+ currentAbsPath := filepath.Join(absPath, file.Name())
+ currentInsidePath := path.Join(insidePath, file.Name())
+
+ if util.SliceContainsString(excludeAbsPath, currentAbsPath) {
+ log.Debug("Skipping %q because matched an excluded path.", currentAbsPath)
+ continue
+ }
+
+ if file.IsDir() {
+ if err := addFile(w, currentInsidePath, currentAbsPath, false); err != nil {
+ return err
+ }
+ if err = addRecursiveExclude(w, currentInsidePath, currentAbsPath, excludeAbsPath, verbose); err != nil {
+ return err
+ }
+ } else {
+ // only copy regular files and symlink regular files, skip non-regular files like socket/pipe/...
+ shouldAdd := file.Mode().IsRegular()
+ if !shouldAdd && file.Mode()&os.ModeSymlink == os.ModeSymlink {
+ target, err := filepath.EvalSymlinks(currentAbsPath)
+ if err != nil {
+ return err
+ }
+ targetStat, err := os.Stat(target)
+ if err != nil {
+ return err
+ }
+ shouldAdd = targetStat.Mode().IsRegular()
+ }
+ if shouldAdd {
+ if err = addFile(w, currentInsidePath, currentAbsPath, verbose); err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
+}
diff --git a/cmd/dump_repo.go b/cmd/dump_repo.go
new file mode 100644
index 0000000..3a24cf6
--- /dev/null
+++ b/cmd/dump_repo.go
@@ -0,0 +1,192 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "os"
+ "strings"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ base "code.gitea.io/gitea/modules/migration"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/structs"
+ "code.gitea.io/gitea/modules/util"
+ "code.gitea.io/gitea/services/convert"
+ "code.gitea.io/gitea/services/migrations"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdDumpRepository represents the available dump repository sub-command.
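+//
+// Example invocation (the binary name `forgejo` is an assumption and may differ
+// depending on how it was installed):
+//
+//   forgejo dump-repo --clone_addr https://github.com/some/repo --repo_dir ./data --units issues,labels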
+var CmdDumpRepository = &cli.Command{
+ Name: "dump-repo",
+ Usage: "Dump the repository from git/github/gitea/gitlab",
+ Description: "This is a command for dumping the repository data.",
+ Action: runDumpRepository,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "git_service",
+ Value: "",
+ Usage: "Git service, git, github, gitea, gitlab. If clone_addr could be recognized, this could be ignored.",
+ },
+ &cli.StringFlag{
+ Name: "repo_dir",
+ Aliases: []string{"r"},
+ Value: "./data",
+ Usage: "Repository dir path to store the data",
+ },
+ &cli.StringFlag{
+ Name: "clone_addr",
+ Value: "",
+ Usage: "The URL will be clone, currently could be a git/github/gitea/gitlab http/https URL",
+ },
+ &cli.StringFlag{
+ Name: "auth_username",
+ Value: "",
+ Usage: "The username to visit the clone_addr",
+ },
+ &cli.StringFlag{
+ Name: "auth_password",
+ Value: "",
+ Usage: "The password to visit the clone_addr",
+ },
+ &cli.StringFlag{
+ Name: "auth_token",
+ Value: "",
+ Usage: "The personal token to visit the clone_addr",
+ },
+ &cli.StringFlag{
+ Name: "owner_name",
+ Value: "",
+ Usage: "The data will be stored on a directory with owner name if not empty",
+ },
+ &cli.StringFlag{
+ Name: "repo_name",
+ Value: "",
+ Usage: "The data will be stored on a directory with repository name if not empty",
+ },
+ &cli.StringFlag{
+ Name: "units",
+ Value: "",
+ Usage: `Which items will be migrated; separate multiple units with commas.
+Allowed units: wiki, issues, labels, releases, release_assets, milestones, pull_requests, comments. Empty means all units.`,
+ },
+ },
+}
+
+func runDumpRepository(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(stdCtx); err != nil {
+ return err
+ }
+
+ // migrations.GiteaLocalUploader depends on git module
+ if err := git.InitSimple(context.Background()); err != nil {
+ return err
+ }
+
+ log.Info("AppPath: %s", setting.AppPath)
+ log.Info("AppWorkPath: %s", setting.AppWorkPath)
+ log.Info("Custom path: %s", setting.CustomPath)
+ log.Info("Log path: %s", setting.Log.RootPath)
+ log.Info("Configuration file: %s", setting.CustomConf)
+
+ var (
+ serviceType structs.GitServiceType
+ cloneAddr = ctx.String("clone_addr")
+ serviceStr = ctx.String("git_service")
+ )
+
+ if strings.HasPrefix(strings.ToLower(cloneAddr), "https://github.com/") {
+ serviceStr = "github"
+ } else if strings.HasPrefix(strings.ToLower(cloneAddr), "https://gitlab.com/") {
+ serviceStr = "gitlab"
+ } else if strings.HasPrefix(strings.ToLower(cloneAddr), "https://gitea.com/") {
+ serviceStr = "gitea"
+ }
+ if serviceStr == "" {
+ return errors.New("git_service missed or clone_addr cannot be recognized")
+ }
+ serviceType = convert.ToGitServiceType(serviceStr)
+
+ opts := base.MigrateOptions{
+ GitServiceType: serviceType,
+ CloneAddr: cloneAddr,
+ AuthUsername: ctx.String("auth_username"),
+ AuthPassword: ctx.String("auth_password"),
+ AuthToken: ctx.String("auth_token"),
+ RepoName: ctx.String("repo_name"),
+ }
+
+ if len(ctx.String("units")) == 0 {
+ opts.Wiki = true
+ opts.Issues = true
+ opts.Milestones = true
+ opts.Labels = true
+ opts.Releases = true
+ opts.Comments = true
+ opts.PullRequests = true
+ opts.ReleaseAssets = true
+ } else {
+ units := strings.Split(ctx.String("units"), ",")
+ for _, unit := range units {
+ switch strings.ToLower(strings.TrimSpace(unit)) {
+ case "":
+ continue
+ case "wiki":
+ opts.Wiki = true
+ case "issues":
+ opts.Issues = true
+ case "milestones":
+ opts.Milestones = true
+ case "labels":
+ opts.Labels = true
+ case "releases":
+ opts.Releases = true
+ case "release_assets":
+ opts.ReleaseAssets = true
+ case "comments":
+ opts.Comments = true
+ case "pull_requests":
+ opts.PullRequests = true
+ default:
+ return errors.New("invalid unit: " + unit)
+ }
+ }
+ }
+
+ // The repo_dir will be removed if an error occurs in DumpRepository.
+ // Make sure the directory doesn't exist or is empty, to avoid deleting user files.
+ repoDir := ctx.String("repo_dir")
+ if exists, err := util.IsExist(repoDir); err != nil {
+ return fmt.Errorf("unable to stat repo_dir %q: %w", repoDir, err)
+ } else if exists {
+ if isDir, _ := util.IsDir(repoDir); !isDir {
+ return fmt.Errorf("repo_dir %q already exists but it's not a directory", repoDir)
+ }
+ if dir, _ := os.ReadDir(repoDir); len(dir) > 0 {
+ return fmt.Errorf("repo_dir %q is not empty", repoDir)
+ }
+ }
+
+ if err := migrations.DumpRepository(
+ context.Background(),
+ repoDir,
+ ctx.String("owner_name"),
+ opts,
+ ); err != nil {
+ log.Fatal("Failed to dump repository: %v", err)
+ return err
+ }
+
+ log.Trace("Dump finished!!!")
+
+ return nil
+}
diff --git a/cmd/dump_test.go b/cmd/dump_test.go
new file mode 100644
index 0000000..4593863
--- /dev/null
+++ b/cmd/dump_test.go
@@ -0,0 +1,118 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "io"
+ "os"
+ "testing"
+
+ "github.com/mholt/archiver/v3"
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
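+// mockArchiver implements archiver.Writer and records the names of the added
+// files, so the tests below can assert on what would have been archived.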
+type mockArchiver struct {
+ addedFiles []string
+}
+
+func (mockArchiver) Create(out io.Writer) error {
+ return nil
+}
+
+func (m *mockArchiver) Write(f archiver.File) error {
+ m.addedFiles = append(m.addedFiles, f.Name())
+ return nil
+}
+
+func (mockArchiver) Close() error {
+ return nil
+}
+
+func TestAddRecursiveExclude(t *testing.T) {
+ t.Run("Empty", func(t *testing.T) {
+ dir := t.TempDir()
+ archiver := &mockArchiver{}
+
+ err := addRecursiveExclude(archiver, "", dir, []string{}, false)
+ require.NoError(t, err)
+ assert.Empty(t, archiver.addedFiles)
+ })
+
+ t.Run("Single file", func(t *testing.T) {
+ dir := t.TempDir()
+ err := os.WriteFile(dir+"/example", nil, 0o666)
+ require.NoError(t, err)
+
+ t.Run("No exclude", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, nil, false)
+ require.NoError(t, err)
+ assert.Len(t, archiver.addedFiles, 1)
+ assert.Contains(t, archiver.addedFiles, "example")
+ })
+
+ t.Run("With exclude", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, []string{dir + "/example"}, false)
+ require.NoError(t, err)
+ assert.Empty(t, archiver.addedFiles)
+ })
+ })
+
+ t.Run("File inside directory", func(t *testing.T) {
+ dir := t.TempDir()
+ err := os.MkdirAll(dir+"/deep/nested/folder", 0o750)
+ require.NoError(t, err)
+ err = os.WriteFile(dir+"/deep/nested/folder/example", nil, 0o666)
+ require.NoError(t, err)
+ err = os.WriteFile(dir+"/deep/nested/folder/another-file", nil, 0o666)
+ require.NoError(t, err)
+
+ t.Run("No exclude", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, nil, false)
+ require.NoError(t, err)
+ assert.Len(t, archiver.addedFiles, 5)
+ assert.Contains(t, archiver.addedFiles, "deep")
+ assert.Contains(t, archiver.addedFiles, "deep/nested")
+ assert.Contains(t, archiver.addedFiles, "deep/nested/folder")
+ assert.Contains(t, archiver.addedFiles, "deep/nested/folder/example")
+ assert.Contains(t, archiver.addedFiles, "deep/nested/folder/another-file")
+ })
+
+ t.Run("Exclude first directory", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, []string{dir + "/deep"}, false)
+ require.NoError(t, err)
+ assert.Empty(t, archiver.addedFiles)
+ })
+
+ t.Run("Exclude nested directory", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, []string{dir + "/deep/nested/folder"}, false)
+ require.NoError(t, err)
+ assert.Len(t, archiver.addedFiles, 2)
+ assert.Contains(t, archiver.addedFiles, "deep")
+ assert.Contains(t, archiver.addedFiles, "deep/nested")
+ })
+
+ t.Run("Exclude file", func(t *testing.T) {
+ archiver := &mockArchiver{}
+
+ err = addRecursiveExclude(archiver, "", dir, []string{dir + "/deep/nested/folder/example"}, false)
+ require.NoError(t, err)
+ assert.Len(t, archiver.addedFiles, 4)
+ assert.Contains(t, archiver.addedFiles, "deep")
+ assert.Contains(t, archiver.addedFiles, "deep/nested")
+ assert.Contains(t, archiver.addedFiles, "deep/nested/folder")
+ assert.Contains(t, archiver.addedFiles, "deep/nested/folder/another-file")
+ })
+ })
+}
diff --git a/cmd/embedded.go b/cmd/embedded.go
new file mode 100644
index 0000000..9f03f7b
--- /dev/null
+++ b/cmd/embedded.go
@@ -0,0 +1,310 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/modules/assetfs"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/options"
+ "code.gitea.io/gitea/modules/public"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/templates"
+ "code.gitea.io/gitea/modules/util"
+
+ "github.com/gobwas/glob"
+ "github.com/urfave/cli/v2"
+)
+
+// CmdEmbedded represents the available embedded sub-command.
+var (
+ CmdEmbedded = &cli.Command{
+ Name: "embedded",
+ Usage: "Extract embedded resources",
+ Description: "A command for extracting embedded resources, like templates and images",
+ Subcommands: []*cli.Command{
+ subcmdList,
+ subcmdView,
+ subcmdExtract,
+ },
+ }
+
+ subcmdList = &cli.Command{
+ Name: "list",
+ Usage: "List files matching the given pattern",
+ Action: runList,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "include-vendored",
+ Aliases: []string{"vendor"},
+ Usage: "Include files under public/vendor as well",
+ },
+ },
+ }
+
+ subcmdView = &cli.Command{
+ Name: "view",
+ Usage: "View a file matching the given pattern",
+ Action: runView,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "include-vendored",
+ Aliases: []string{"vendor"},
+ Usage: "Include files under public/vendor as well",
+ },
+ },
+ }
+
+ subcmdExtract = &cli.Command{
+ Name: "extract",
+ Usage: "Extract resources",
+ Action: runExtract,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "include-vendored",
+ Aliases: []string{"vendor"},
+ Usage: "Include files under public/vendor as well",
+ },
+ &cli.BoolFlag{
+ Name: "overwrite",
+ Usage: "Overwrite files if they already exist",
+ },
+ &cli.BoolFlag{
+ Name: "rename",
+ Usage: "Rename files as {name}.bak if they already exist (overwrites previous .bak)",
+ },
+ &cli.BoolFlag{
+ Name: "custom",
+ Usage: "Extract to the 'custom' directory as per app.ini",
+ },
+ &cli.StringFlag{
+ Name: "destination",
+ Aliases: []string{"dest-dir"},
+ Usage: "Extract to the specified directory",
+ },
+ },
+ }
+
+ matchedAssetFiles []assetFile
+)
+
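+// assetFile describes a matched embedded asset: the layered filesystem it was
+// found in, its name within that filesystem, and its path prefixed with the
+// section it belongs to (options, public or templates).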
+type assetFile struct {
+ fs *assetfs.LayeredFS
+ name string
+ path string
+}
+
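+// initEmbeddedExtractor sets up a console logger, compiles the glob patterns
+// given as arguments and collects the matching embedded options, public and
+// templates assets into matchedAssetFiles.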
+func initEmbeddedExtractor(c *cli.Context) error {
+ setupConsoleLogger(log.ERROR, log.CanColorStderr, os.Stderr)
+
+ patterns, err := compileCollectPatterns(c.Args().Slice())
+ if err != nil {
+ return err
+ }
+
+ collectAssetFilesByPattern(c, patterns, "options", options.BuiltinAssets())
+ collectAssetFilesByPattern(c, patterns, "public", public.BuiltinAssets())
+ collectAssetFilesByPattern(c, patterns, "templates", templates.BuiltinAssets())
+
+ return nil
+}
+
+func runList(c *cli.Context) error {
+ if err := runListDo(c); err != nil {
+ fmt.Fprintf(os.Stderr, "%v\n", err)
+ return err
+ }
+ return nil
+}
+
+func runView(c *cli.Context) error {
+ if err := runViewDo(c); err != nil {
+ fmt.Fprintf(os.Stderr, "%v\n", err)
+ return err
+ }
+ return nil
+}
+
+func runExtract(c *cli.Context) error {
+ if err := runExtractDo(c); err != nil {
+ fmt.Fprintf(os.Stderr, "%v\n", err)
+ return err
+ }
+ return nil
+}
+
+func runListDo(c *cli.Context) error {
+ if err := initEmbeddedExtractor(c); err != nil {
+ return err
+ }
+
+ for _, a := range matchedAssetFiles {
+ fmt.Println(a.path)
+ }
+
+ return nil
+}
+
+func runViewDo(c *cli.Context) error {
+ if err := initEmbeddedExtractor(c); err != nil {
+ return err
+ }
+
+ if len(matchedAssetFiles) == 0 {
+ return errors.New("no files matched the given pattern")
+ } else if len(matchedAssetFiles) > 1 {
+ return errors.New("too many files matched the given pattern, try to be more specific")
+ }
+
+ data, err := matchedAssetFiles[0].fs.ReadFile(matchedAssetFiles[0].name)
+ if err != nil {
+ return fmt.Errorf("%s: %w", matchedAssetFiles[0].path, err)
+ }
+
+ if _, err = os.Stdout.Write(data); err != nil {
+ return fmt.Errorf("%s: %w", matchedAssetFiles[0].path, err)
+ }
+
+ return nil
+}
+
+func runExtractDo(c *cli.Context) error {
+ if err := initEmbeddedExtractor(c); err != nil {
+ return err
+ }
+
+ if c.NArg() == 0 {
+ return errors.New("a list of pattern of files to extract is mandatory (e.g. '**' for all)")
+ }
+
+ destdir := "."
+
+ if c.IsSet("destination") {
+ destdir = c.String("destination")
+ } else if c.Bool("custom") {
+ destdir = setting.CustomPath
+ fmt.Println("Using app.ini at", setting.CustomConf)
+ }
+
+ fi, err := os.Stat(destdir)
+ if errors.Is(err, os.ErrNotExist) {
+ // In case Windows users attempt to provide a forward-slash path
+ wdestdir := filepath.FromSlash(destdir)
+ if wfi, werr := os.Stat(wdestdir); werr == nil {
+ destdir = wdestdir
+ fi = wfi
+ err = nil
+ }
+ }
+ if err != nil {
+ return fmt.Errorf("%s: %s", destdir, err)
+ } else if !fi.IsDir() {
+ return fmt.Errorf("destination %q is not a directory", destdir)
+ }
+
+ fmt.Printf("Extracting to %s:\n", destdir)
+
+ overwrite := c.Bool("overwrite")
+ rename := c.Bool("rename")
+
+ for _, a := range matchedAssetFiles {
+ if err := extractAsset(destdir, a, overwrite, rename); err != nil {
+ // Non-fatal error
+ fmt.Fprintf(os.Stderr, "%s: %v", a.path, err)
+ }
+ }
+
+ return nil
+}
+
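+// extractAsset writes the embedded asset a below the destination directory d,
+// creating parent directories as needed and honoring the overwrite and rename
+// flags when the target file already exists.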
+func extractAsset(d string, a assetFile, overwrite, rename bool) error {
+ dest := filepath.Join(d, filepath.FromSlash(a.path))
+ dir := filepath.Dir(dest)
+
+ data, err := a.fs.ReadFile(a.name)
+ if err != nil {
+ return fmt.Errorf("%s: %w", a.path, err)
+ }
+
+ if err := os.MkdirAll(dir, os.ModePerm); err != nil {
+ return fmt.Errorf("%s: %w", dir, err)
+ }
+
+ perms := os.ModePerm & 0o666
+
+ fi, err := os.Lstat(dest)
+ if err != nil {
+ if !errors.Is(err, os.ErrNotExist) {
+ return fmt.Errorf("%s: %w", dest, err)
+ }
+ } else if !overwrite && !rename {
+ fmt.Printf("%s already exists; skipped.\n", dest)
+ return nil
+ } else if !fi.Mode().IsRegular() {
+ return fmt.Errorf("%s already exists, but it's not a regular file", dest)
+ } else if rename {
+ if err := util.Rename(dest, dest+".bak"); err != nil {
+ return fmt.Errorf("error creating backup for %s: %w", dest, err)
+ }
+ // Attempt to respect file permissions mask (even if user:group will be set anew)
+ perms = fi.Mode()
+ }
+
+ file, err := os.OpenFile(dest, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, perms)
+ if err != nil {
+ return fmt.Errorf("%s: %w", dest, err)
+ }
+ defer file.Close()
+
+ if _, err = file.Write(data); err != nil {
+ return fmt.Errorf("%s: %w", dest, err)
+ }
+
+ fmt.Println(dest)
+
+ return nil
+}
+
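+// collectAssetFilesByPattern appends to matchedAssetFiles every file of the
+// given layer whose "<path>/<name>" matches one of the globs, skipping files
+// under public/vendor unless --include-vendored is set.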
+func collectAssetFilesByPattern(c *cli.Context, globs []glob.Glob, path string, layer *assetfs.Layer) {
+ fs := assetfs.Layered(layer)
+ files, err := fs.ListAllFiles(".", true)
+ if err != nil {
+ log.Error("Error listing files in %q: %v", path, err)
+ return
+ }
+ for _, name := range files {
+ if path == "public" &&
+ strings.HasPrefix(name, "vendor/") &&
+ !c.Bool("include-vendored") {
+ continue
+ }
+ matchName := path + "/" + name
+ for _, g := range globs {
+ if g.Match(matchName) {
+ matchedAssetFiles = append(matchedAssetFiles, assetFile{fs: fs, name: name, path: path + "/" + name})
+ break
+ }
+ }
+ }
+}
+
+func compileCollectPatterns(args []string) ([]glob.Glob, error) {
+ if len(args) == 0 {
+ args = []string{"**"}
+ }
+ pat := make([]glob.Glob, len(args))
+ for i := range args {
+ if g, err := glob.Compile(args[i], '/'); err != nil {
+ return nil, fmt.Errorf("'%s': Invalid glob pattern: %w", args[i], err)
+ } else { //nolint:revive
+ pat[i] = g
+ }
+ }
+ return pat, nil
+}
diff --git a/cmd/forgejo/actions.go b/cmd/forgejo/actions.go
new file mode 100644
index 0000000..1560b10
--- /dev/null
+++ b/cmd/forgejo/actions.go
@@ -0,0 +1,242 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "context"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "os"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/setting"
+ private_routers "code.gitea.io/gitea/routers/private"
+
+ "github.com/urfave/cli/v2"
+)
+
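+// CmdActions returns the `actions` command tree for managing Forgejo Actions
+// runners from the command line: generating registration tokens, generating
+// shared secrets and registering runners.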
+func CmdActions(ctx context.Context) *cli.Command {
+ return &cli.Command{
+ Name: "actions",
+ Usage: "Commands for managing Forgejo Actions",
+ Subcommands: []*cli.Command{
+ SubcmdActionsGenerateRunnerToken(ctx),
+ SubcmdActionsGenerateRunnerSecret(ctx),
+ SubcmdActionsRegister(ctx),
+ },
+ }
+}
+
+func SubcmdActionsGenerateRunnerToken(ctx context.Context) *cli.Command {
+ return &cli.Command{
+ Name: "generate-runner-token",
+ Usage: "Generate a new token for a runner to use to register with the server",
+ Before: prepareWorkPathAndCustomConf(ctx),
+ Action: func(cliCtx *cli.Context) error { return RunGenerateActionsRunnerToken(ctx, cliCtx) },
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "scope",
+ Aliases: []string{"s"},
+ Value: "",
+ Usage: "{owner}[/{repo}] - leave empty for a global runner",
+ },
+ },
+ }
+}
+
+func SubcmdActionsGenerateRunnerSecret(ctx context.Context) *cli.Command {
+ return &cli.Command{
+ Name: "generate-secret",
+ Usage: "Generate a secret suitable for input to the register subcommand",
+ Action: func(cliCtx *cli.Context) error { return RunGenerateSecret(ctx, cliCtx) },
+ }
+}
+
+func SubcmdActionsRegister(ctx context.Context) *cli.Command {
+ return &cli.Command{
+ Name: "register",
+ Usage: "Idempotent registration of a runner using a shared secret",
+ Before: prepareWorkPathAndCustomConf(ctx),
+ Action: func(cliCtx *cli.Context) error { return RunRegister(ctx, cliCtx) },
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "secret",
+ Usage: "the secret the runner will use to connect as a 40 character hexadecimal string",
+ },
+ &cli.StringFlag{
+ Name: "secret-stdin",
+ Usage: "the secret the runner will use to connect as a 40 character hexadecimal string, read from stdin",
+ },
+ &cli.StringFlag{
+ Name: "secret-file",
+ Usage: "path to the file containing the secret the runner will use to connect as a 40 character hexadecimal string",
+ },
+ &cli.StringFlag{
+ Name: "scope",
+ Aliases: []string{"s"},
+ Value: "",
+ Usage: "{owner}[/{repo}] - leave empty for a global runner",
+ },
+ &cli.StringFlag{
+ Name: "labels",
+ Value: "",
+ Usage: "comma separated list of labels supported by the runner (e.g. docker,ubuntu-latest,self-hosted) (not required since v1.21)",
+ },
+ &cli.BoolFlag{
+ Name: "keep-labels",
+ Value: false,
+ Usage: "do not affect the labels when updating an existing runner",
+ },
+ &cli.StringFlag{
+ Name: "name",
+ Value: "runner",
+ Usage: "name of the runner (default runner)",
+ },
+ &cli.StringFlag{
+ Name: "version",
+ Value: "",
+ Usage: "version of the runner (not required since v1.21)",
+ },
+ },
+ }
+}
+
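+// readSecret returns the runner secret taken from --secret, --secret-stdin or
+// --secret-file, in that order of precedence.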
+func readSecret(ctx context.Context, cliCtx *cli.Context) (string, error) {
+ if cliCtx.IsSet("secret") {
+ return cliCtx.String("secret"), nil
+ }
+ if cliCtx.IsSet("secret-stdin") {
+ buf, err := io.ReadAll(ContextGetStdin(ctx))
+ if err != nil {
+ return "", err
+ }
+ return string(buf), nil
+ }
+ if cliCtx.IsSet("secret-file") {
+ path := cliCtx.String("secret-file")
+ buf, err := os.ReadFile(path)
+ if err != nil {
+ return "", err
+ }
+ return string(buf), nil
+ }
+ return "", fmt.Errorf("at least one of the --secret, --secret-stdin, --secret-file options is required")
+}
+
+func validateSecret(secret string) error {
+ secretLen := len(secret)
+ if secretLen != 40 {
+ return fmt.Errorf("the secret must be exactly 40 characters long, not %d: generate-secret can provide a secret matching the requirements", secretLen)
+ }
+ if _, err := hex.DecodeString(secret); err != nil {
+ return fmt.Errorf("the secret must be an hexadecimal string: %w", err)
+ }
+ return nil
+}
+
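+// getLabels returns the labels to set on the runner, or nil when --keep-labels
+// is given and the existing labels of the runner must be preserved.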
+func getLabels(cliCtx *cli.Context) (*[]string, error) {
+ if !cliCtx.Bool("keep-labels") {
+ lblValue := strings.Split(cliCtx.String("labels"), ",")
+ return &lblValue, nil
+ }
+ if cliCtx.String("labels") != "" {
+ return nil, fmt.Errorf("--labels and --keep-labels should not be used together")
+ }
+ return nil, nil
+}
+
+func RunRegister(ctx context.Context, cliCtx *cli.Context) error {
+ var cancel context.CancelFunc
+ if !ContextGetNoInit(ctx) {
+ ctx, cancel = installSignals(ctx)
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+ }
+ setting.MustInstalled()
+
+ secret, err := readSecret(ctx, cliCtx)
+ if err != nil {
+ return err
+ }
+ if err := validateSecret(secret); err != nil {
+ return err
+ }
+ scope := cliCtx.String("scope")
+ name := cliCtx.String("name")
+ version := cliCtx.String("version")
+ labels, err := getLabels(cliCtx)
+ if err != nil {
+ return err
+ }
+
+ //
+ // There are two kinds of tokens
+ //
+ // - "registration token" only used when a runner interacts to
+ // register
+ //
+ // - "token" obtained after a successful registration and stored by
+ // the runner to authenticate
+ //
+ // The register subcommand does not need a "registration token"; it
+ // needs a "token". Using the same name would be confusing, which is
+ // why the cli prefers "secret".
+ //
+ // The ActionsRunnerRegister argument is named token to stay consistent
+ // with the internal naming. It is still confusing to the developer but
+ // not to the user.
+ //
+ owner, repo, err := private_routers.ParseScope(ctx, scope)
+ if err != nil {
+ return err
+ }
+
+ runner, err := actions_model.RegisterRunner(ctx, owner, repo, secret, labels, name, version)
+ if err != nil {
+ return fmt.Errorf("error while registering runner: %v", err)
+ }
+
+ if _, err := fmt.Fprintf(ContextGetStdout(ctx), "%s", runner.UUID); err != nil {
+ panic(err)
+ }
+ return nil
+}
+
+func RunGenerateSecret(ctx context.Context, cliCtx *cli.Context) error {
+ runner := actions_model.ActionRunner{}
+ if err := runner.GenerateToken(); err != nil {
+ return err
+ }
+ if _, err := fmt.Fprintf(ContextGetStdout(ctx), "%s", runner.Token); err != nil {
+ panic(err)
+ }
+ return nil
+}
+
+func RunGenerateActionsRunnerToken(ctx context.Context, cliCtx *cli.Context) error {
+ if !ContextGetNoInit(ctx) {
+ var cancel context.CancelFunc
+ ctx, cancel = installSignals(ctx)
+ defer cancel()
+ }
+
+ setting.MustInstalled()
+
+ scope := cliCtx.String("scope")
+
+ respText, extra := private.GenerateActionsRunnerToken(ctx, scope)
+ if extra.HasError() {
+ return handleCliResponseExtra(ctx, extra)
+ }
+ if _, err := fmt.Fprintf(ContextGetStdout(ctx), "%s", respText.Text); err != nil {
+ panic(err)
+ }
+ return nil
+}
diff --git a/cmd/forgejo/actions_test.go b/cmd/forgejo/actions_test.go
new file mode 100644
index 0000000..897af98
--- /dev/null
+++ b/cmd/forgejo/actions_test.go
@@ -0,0 +1,88 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "fmt"
+ "testing"
+
+ "code.gitea.io/gitea/services/context"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/urfave/cli/v2"
+)
+
+func TestActions_getLabels(t *testing.T) {
+ type testCase struct {
+ args []string
+ hasLabels bool
+ hasError bool
+ labels []string
+ }
+ type resultType struct {
+ labels *[]string
+ err error
+ }
+
+ cases := []testCase{
+ {
+ args: []string{"x"},
+ hasLabels: true,
+ hasError: false,
+ labels: []string{""},
+ }, {
+ args: []string{"x", "--labels", "a,b"},
+ hasLabels: true,
+ hasError: false,
+ labels: []string{"a", "b"},
+ }, {
+ args: []string{"x", "--keep-labels"},
+ hasLabels: false,
+ hasError: false,
+ }, {
+ args: []string{"x", "--keep-labels", "--labels", "a,b"},
+ hasLabels: false,
+ hasError: true,
+ }, {
+ // this edge-case exists because that's what actually happens
+ // when no '--labels ...' options are present
+ args: []string{"x", "--keep-labels", "--labels", ""},
+ hasLabels: false,
+ hasError: false,
+ },
+ }
+
+ flags := SubcmdActionsRegister(context.Context{}).Flags
+ for _, c := range cases {
+ t.Run(fmt.Sprintf("args: %v", c.args), func(t *testing.T) {
+ // Create a copy of command to test
+ var result *resultType
+ app := cli.NewApp()
+ app.Flags = flags
+ app.Action = func(ctx *cli.Context) error {
+ labels, err := getLabels(ctx)
+ result = &resultType{labels, err}
+ return nil
+ }
+
+ // Run it
+ _ = app.Run(c.args)
+
+ // Test the results
+ require.NotNil(t, result)
+ if c.hasLabels {
+ assert.NotNil(t, result.labels)
+ assert.Equal(t, c.labels, *result.labels)
+ } else {
+ assert.Nil(t, result.labels)
+ }
+ if c.hasError {
+ require.Error(t, result.err)
+ } else {
+ assert.NoError(t, result.err)
+ }
+ })
+ }
+}
diff --git a/cmd/forgejo/f3.go b/cmd/forgejo/f3.go
new file mode 100644
index 0000000..5a0d0ac
--- /dev/null
+++ b/cmd/forgejo/f3.go
@@ -0,0 +1,77 @@
+// Copyright Earl Warren <contact@earl-warren.org>
+// Copyright Loïc Dachary <loic@dachary.org>
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "context"
+ "errors"
+
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/services/f3/util"
+
+ _ "code.gitea.io/gitea/services/f3/driver" // register the driver
+
+ f3_cmd "code.forgejo.org/f3/gof3/v3/cmd"
+ f3_logger "code.forgejo.org/f3/gof3/v3/logger"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+ "github.com/urfave/cli/v2"
+)
+
+func CmdF3(ctx context.Context) *cli.Command {
+ ctx = f3_logger.ContextSetLogger(ctx, util.NewF3Logger(nil, log.GetLogger(log.DEFAULT)))
+ return &cli.Command{
+ Name: "f3",
+ Usage: "F3",
+ Subcommands: []*cli.Command{
+ SubcmdF3Mirror(ctx),
+ },
+ }
+}
+
+func SubcmdF3Mirror(ctx context.Context) *cli.Command {
+ mirrorCmd := f3_cmd.CreateCmdMirror(ctx)
+ mirrorCmd.Before = prepareWorkPathAndCustomConf(ctx)
+ f3Action := mirrorCmd.Action
+ mirrorCmd.Action = func(c *cli.Context) error { return runMirror(ctx, c, f3Action) }
+ return mirrorCmd
+}
+
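+// runMirror initializes the database, storage and git (unless initialization
+// is disabled through the context) and then delegates to the wrapped gof3
+// mirror action.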
+func runMirror(ctx context.Context, c *cli.Context, action cli.ActionFunc) error {
+ setting.LoadF3Setting()
+ if !setting.F3.Enabled {
+ return errors.New("F3 is disabled, it is not ready to be used and is only present for development purposes")
+ }
+
+ var cancel context.CancelFunc
+ if !ContextGetNoInit(ctx) {
+ ctx, cancel = installSignals(ctx)
+ defer cancel()
+
+ if err := initDB(ctx); err != nil {
+ return err
+ }
+
+ if err := storage.Init(); err != nil {
+ return err
+ }
+
+ if err := git.InitSimple(ctx); err != nil {
+ return err
+ }
+ if err := models.Init(ctx); err != nil {
+ return err
+ }
+ }
+
+ err := action(c)
+ if panicError, ok := err.(f3_util.PanicError); ok {
+ log.Debug("F3 Stack trace\n%s", panicError.Stack())
+ }
+ return err
+}
diff --git a/cmd/forgejo/forgejo.go b/cmd/forgejo/forgejo.go
new file mode 100644
index 0000000..1b7e16c
--- /dev/null
+++ b/cmd/forgejo/forgejo.go
@@ -0,0 +1,170 @@
+// Copyright The Forgejo Authors.
+// SPDX-License-Identifier: MIT
+
+package forgejo
+
+import (
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "os/signal"
+ "syscall"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
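+// key is the type of the context keys used to toggle initialization and exit
+// behavior and to override the standard streams.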
+type key int
+
+const (
+ noInitKey key = iota + 1
+ noExitKey
+ stdoutKey
+ stderrKey
+ stdinKey
+)
+
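+// CmdForgejo returns the `forgejo-cli` command tree grouping the
+// Forgejo-specific subcommands (actions, f3).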
+func CmdForgejo(ctx context.Context) *cli.Command {
+ return &cli.Command{
+ Name: "forgejo-cli",
+ Usage: "Forgejo CLI",
+ Flags: []cli.Flag{},
+ Subcommands: []*cli.Command{
+ CmdActions(ctx),
+ CmdF3(ctx),
+ },
+ }
+}
+
+func ContextSetNoInit(ctx context.Context, value bool) context.Context {
+ return context.WithValue(ctx, noInitKey, value)
+}
+
+func ContextGetNoInit(ctx context.Context) bool {
+ value, ok := ctx.Value(noInitKey).(bool)
+ return ok && value
+}
+
+func ContextSetNoExit(ctx context.Context, value bool) context.Context {
+ return context.WithValue(ctx, noExitKey, value)
+}
+
+func ContextGetNoExit(ctx context.Context) bool {
+ value, ok := ctx.Value(noExitKey).(bool)
+ return ok && value
+}
+
+func ContextSetStderr(ctx context.Context, value io.Writer) context.Context {
+ return context.WithValue(ctx, stderrKey, value)
+}
+
+func ContextGetStderr(ctx context.Context) io.Writer {
+ value, ok := ctx.Value(stderrKey).(io.Writer)
+ if !ok {
+ return os.Stderr
+ }
+ return value
+}
+
+func ContextSetStdout(ctx context.Context, value io.Writer) context.Context {
+ return context.WithValue(ctx, stdoutKey, value)
+}
+
+func ContextGetStdout(ctx context.Context) io.Writer {
+ value, ok := ctx.Value(stdoutKey).(io.Writer)
+ if !ok {
+ return os.Stdout
+ }
+ return value
+}
+
+func ContextSetStdin(ctx context.Context, value io.Reader) context.Context {
+ return context.WithValue(ctx, stdinKey, value)
+}
+
+func ContextGetStdin(ctx context.Context) io.Reader {
+ value, ok := ctx.Value(stdinKey).(io.Reader)
+ if !ok {
+ return os.Stdin
+ }
+ return value
+}
+
+// copied from ../cmd.go
+func initDB(ctx context.Context) error {
+ setting.MustInstalled()
+ setting.LoadDBSetting()
+ setting.InitSQLLoggersForCli(log.INFO)
+
+ if setting.Database.Type == "" {
+ log.Fatal(`Database settings are missing from the configuration file: %q.
+Ensure you are running in the correct environment or set the correct configuration file with -c.
+If this is the intended configuration file complete the [database] section.`, setting.CustomConf)
+ }
+ if err := db.InitEngine(ctx); err != nil {
+ return fmt.Errorf("unable to initialize the database using the configuration in %q. Error: %w", setting.CustomConf, err)
+ }
+ return nil
+}
+
+// copied from ../cmd.go
+func installSignals(ctx context.Context) (context.Context, context.CancelFunc) {
+ ctx, cancel := context.WithCancel(ctx)
+ go func() {
+ // install notify
+ signalChannel := make(chan os.Signal, 1)
+
+ signal.Notify(
+ signalChannel,
+ syscall.SIGINT,
+ syscall.SIGTERM,
+ )
+ select {
+ case <-signalChannel:
+ case <-ctx.Done():
+ }
+ cancel()
+ signal.Reset()
+ }()
+
+ return ctx, cancel
+}
+
+func handleCliResponseExtra(ctx context.Context, extra private.ResponseExtra) error {
+ if false && extra.UserMsg != "" {
+ if _, err := fmt.Fprintf(ContextGetStdout(ctx), "%s", extra.UserMsg); err != nil {
+ panic(err)
+ }
+ }
+ if ContextGetNoExit(ctx) {
+ return extra.Error
+ }
+ return cli.Exit(extra.Error, 1)
+}
+
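+// prepareWorkPathAndCustomConf returns a cli Before hook that collects the
+// --work-path, --custom-path and --config flags from the command lineage and
+// initializes the settings from them, unless initialization is disabled
+// through the context.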
+func prepareWorkPathAndCustomConf(ctx context.Context) func(c *cli.Context) error {
+ return func(c *cli.Context) error {
+ if !ContextGetNoInit(ctx) {
+ var args setting.ArgWorkPathAndCustomConf
+ // from children to parent, check the global flags
+ for _, curCtx := range c.Lineage() {
+ if curCtx.IsSet("work-path") && args.WorkPath == "" {
+ args.WorkPath = curCtx.String("work-path")
+ }
+ if curCtx.IsSet("custom-path") && args.CustomPath == "" {
+ args.CustomPath = curCtx.String("custom-path")
+ }
+ if curCtx.IsSet("config") && args.CustomConf == "" {
+ args.CustomConf = curCtx.String("config")
+ }
+ }
+ setting.InitWorkPathAndCommonConfig(os.Getenv, args)
+ }
+ return nil
+ }
+}
diff --git a/cmd/generate.go b/cmd/generate.go
new file mode 100644
index 0000000..8069462
--- /dev/null
+++ b/cmd/generate.go
@@ -0,0 +1,100 @@
+// Copyright 2016 The Gogs Authors. All rights reserved.
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ "os"
+
+ "code.gitea.io/gitea/modules/generate"
+
+ "github.com/mattn/go-isatty"
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ // CmdGenerate represents the available generate sub-command.
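+ //
+ // Example invocation (the binary name `forgejo` is an assumption and may
+ // differ depending on how it was installed):
+ //
+ //   forgejo generate secret SECRET_KEY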
+ CmdGenerate = &cli.Command{
+ Name: "generate",
+ Usage: "Generate Gitea's secrets/keys/tokens",
+ Subcommands: []*cli.Command{
+ subcmdSecret,
+ },
+ }
+
+ subcmdSecret = &cli.Command{
+ Name: "secret",
+ Usage: "Generate a secret token",
+ Subcommands: []*cli.Command{
+ microcmdGenerateInternalToken,
+ microcmdGenerateLfsJwtSecret,
+ microcmdGenerateSecretKey,
+ },
+ }
+
+ microcmdGenerateInternalToken = &cli.Command{
+ Name: "INTERNAL_TOKEN",
+ Usage: "Generate a new INTERNAL_TOKEN",
+ Action: runGenerateInternalToken,
+ }
+
+ microcmdGenerateLfsJwtSecret = &cli.Command{
+ Name: "JWT_SECRET",
+ Aliases: []string{"LFS_JWT_SECRET"},
+ Usage: "Generate a new JWT_SECRET",
+ Action: runGenerateLfsJwtSecret,
+ }
+
+ microcmdGenerateSecretKey = &cli.Command{
+ Name: "SECRET_KEY",
+ Usage: "Generate a new SECRET_KEY",
+ Action: runGenerateSecretKey,
+ }
+)
+
+func runGenerateInternalToken(c *cli.Context) error {
+ internalToken, err := generate.NewInternalToken()
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("%s", internalToken)
+
+ if isatty.IsTerminal(os.Stdout.Fd()) {
+ fmt.Printf("\n")
+ }
+
+ return nil
+}
+
+func runGenerateLfsJwtSecret(c *cli.Context) error {
+ _, jwtSecretBase64, err := generate.NewJwtSecret()
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("%s", jwtSecretBase64)
+
+ if isatty.IsTerminal(os.Stdout.Fd()) {
+ fmt.Printf("\n")
+ }
+
+ return nil
+}
+
+func runGenerateSecretKey(c *cli.Context) error {
+ secretKey, err := generate.NewSecretKey()
+ if err != nil {
+ return err
+ }
+
+ fmt.Printf("%s", secretKey)
+
+ if isatty.IsTerminal(os.Stdout.Fd()) {
+ fmt.Printf("\n")
+ }
+
+ return nil
+}
diff --git a/cmd/hook.go b/cmd/hook.go
new file mode 100644
index 0000000..edab611
--- /dev/null
+++ b/cmd/hook.go
@@ -0,0 +1,795 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "fmt"
+ "io"
+ "os"
+ "strconv"
+ "strings"
+ "time"
+
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/git/pushoptions"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/private"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+const (
+ hookBatchSize = 30
+)
+
+var (
+ // CmdHook represents the available hooks sub-command.
+ CmdHook = &cli.Command{
+ Name: "hook",
+ Usage: "(internal) Should only be called by Git",
+ Description: "Delegate commands to corresponding Git hooks",
+ Before: PrepareConsoleLoggerLevel(log.FATAL),
+ Subcommands: []*cli.Command{
+ subcmdHookPreReceive,
+ subcmdHookUpdate,
+ subcmdHookPostReceive,
+ subcmdHookProcReceive,
+ },
+ }
+
+ subcmdHookPreReceive = &cli.Command{
+ Name: "pre-receive",
+ Usage: "Delegate pre-receive Git hook",
+ Description: "This command should only be called by Git",
+ Action: runHookPreReceive,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ }
+ subcmdHookUpdate = &cli.Command{
+ Name: "update",
+ Usage: "Delegate update Git hook",
+ Description: "This command should only be called by Git",
+ Action: runHookUpdate,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ }
+ subcmdHookPostReceive = &cli.Command{
+ Name: "post-receive",
+ Usage: "Delegate post-receive Git hook",
+ Description: "This command should only be called by Git",
+ Action: runHookPostReceive,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ }
+ // Note: new hook since git 2.29
+ subcmdHookProcReceive = &cli.Command{
+ Name: "proc-receive",
+ Usage: "Delegate proc-receive Git hook",
+ Description: "This command should only be called by Git",
+ Action: runHookProcReceive,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ }
+)
+
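+// delayWriter buffers output and only starts forwarding it to the underlying
+// writer once the configured delay has elapsed; if it is closed before the
+// delay expires, the buffered output is discarded. It is used to hide verbose
+// push progress for pushes that finish before the delay.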
+type delayWriter struct {
+ internal io.Writer
+ buf *bytes.Buffer
+ timer *time.Timer
+}
+
+func newDelayWriter(internal io.Writer, delay time.Duration) *delayWriter {
+ timer := time.NewTimer(delay)
+ return &delayWriter{
+ internal: internal,
+ buf: &bytes.Buffer{},
+ timer: timer,
+ }
+}
+
+func (d *delayWriter) Write(p []byte) (n int, err error) {
+ if d.buf != nil {
+ select {
+ case <-d.timer.C:
+ _, err := d.internal.Write(d.buf.Bytes())
+ if err != nil {
+ return 0, err
+ }
+ d.buf = nil
+ return d.internal.Write(p)
+ default:
+ return d.buf.Write(p)
+ }
+ }
+ return d.internal.Write(p)
+}
+
+func (d *delayWriter) WriteString(s string) (n int, err error) {
+ if d.buf != nil {
+ select {
+ case <-d.timer.C:
+ _, err := d.internal.Write(d.buf.Bytes())
+ if err != nil {
+ return 0, err
+ }
+ d.buf = nil
+ return d.internal.Write([]byte(s))
+ default:
+ return d.buf.WriteString(s)
+ }
+ }
+ return d.internal.Write([]byte(s))
+}
+
+func (d *delayWriter) Close() error {
+ if d.timer.Stop() {
+ d.buf = nil
+ }
+ if d.buf == nil {
+ return nil
+ }
+ _, err := d.internal.Write(d.buf.Bytes())
+ d.buf = nil
+ return err
+}
+
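+// nilWriter discards everything written to it; it is used when verbose push
+// output is disabled.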
+type nilWriter struct{}
+
+func (n *nilWriter) Write(p []byte) (int, error) {
+ return len(p), nil
+}
+
+func (n *nilWriter) WriteString(s string) (int, error) {
+ return len(s), nil
+}
+
+func runHookPreReceive(c *cli.Context) error {
+ if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal {
+ return nil
+ }
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+
+ if len(os.Getenv("SSH_ORIGINAL_COMMAND")) == 0 {
+ if setting.OnlyAllowPushIfGiteaEnvironmentSet {
+ return fail(ctx, `Rejecting changes as Forgejo environment not set.
+If you are pushing over SSH you must push with a key managed by
+Forgejo or set your environment appropriately.`, "")
+ }
+ return nil
+ }
+
+ // the environment is set by serv command
+ isWiki, _ := strconv.ParseBool(os.Getenv(repo_module.EnvRepoIsWiki))
+ username := os.Getenv(repo_module.EnvRepoUsername)
+ reponame := os.Getenv(repo_module.EnvRepoName)
+ userID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64)
+ prID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPRID), 10, 64)
+ deployKeyID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvDeployKeyID), 10, 64)
+ actionPerm, _ := strconv.ParseInt(os.Getenv(repo_module.EnvActionPerm), 10, 64)
+
+ hookOptions := private.HookOptions{
+ UserID: userID,
+ GitAlternativeObjectDirectories: os.Getenv(private.GitAlternativeObjectDirectories),
+ GitObjectDirectory: os.Getenv(private.GitObjectDirectory),
+ GitQuarantinePath: os.Getenv(private.GitQuarantinePath),
+ GitPushOptions: pushoptions.New().ReadEnv().Map(),
+ PullRequestID: prID,
+ DeployKeyID: deployKeyID,
+ ActionPerm: int(actionPerm),
+ }
+
+ scanner := bufio.NewScanner(os.Stdin)
+
+ oldCommitIDs := make([]string, hookBatchSize)
+ newCommitIDs := make([]string, hookBatchSize)
+ refFullNames := make([]git.RefName, hookBatchSize)
+ count := 0
+ total := 0
+ lastline := 0
+
+ var out io.Writer
+ out = &nilWriter{}
+ if setting.Git.VerbosePush {
+ if setting.Git.VerbosePushDelay > 0 {
+ dWriter := newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
+ defer dWriter.Close()
+ out = dWriter
+ } else {
+ out = os.Stdout
+ }
+ }
+
+ supportProcReceive := false
+ if git.CheckGitVersionAtLeast("2.29") == nil {
+ supportProcReceive = true
+ }
+
+ for scanner.Scan() {
+ // TODO: support news feeds for wiki
+ if isWiki {
+ continue
+ }
+
+ fields := bytes.Fields(scanner.Bytes())
+ if len(fields) != 3 {
+ continue
+ }
+
+ oldCommitID := string(fields[0])
+ newCommitID := string(fields[1])
+ refFullName := git.RefName(fields[2])
+ total++
+ lastline++
+
+ // If the ref is a branch or tag, check whether it's protected.
+ // If supportProcReceive is set, all refs should be checked because
+ // the permission check was delayed.
+ if supportProcReceive || refFullName.IsBranch() || refFullName.IsTag() {
+ oldCommitIDs[count] = oldCommitID
+ newCommitIDs[count] = newCommitID
+ refFullNames[count] = refFullName
+ count++
+ fmt.Fprintf(out, "*")
+
+ if count >= hookBatchSize {
+ fmt.Fprintf(out, " Checking %d references\n", count)
+
+ hookOptions.OldCommitIDs = oldCommitIDs
+ hookOptions.NewCommitIDs = newCommitIDs
+ hookOptions.RefFullNames = refFullNames
+ extra := private.HookPreReceive(ctx, username, reponame, hookOptions)
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "HookPreReceive(batch) failed: %v", extra.Error)
+ }
+ count = 0
+ lastline = 0
+ }
+ } else {
+ fmt.Fprintf(out, ".")
+ }
+ if lastline >= hookBatchSize {
+ fmt.Fprintf(out, "\n")
+ lastline = 0
+ }
+ }
+
+ if count > 0 {
+ hookOptions.OldCommitIDs = oldCommitIDs[:count]
+ hookOptions.NewCommitIDs = newCommitIDs[:count]
+ hookOptions.RefFullNames = refFullNames[:count]
+
+ fmt.Fprintf(out, " Checking %d references\n", count)
+
+ extra := private.HookPreReceive(ctx, username, reponame, hookOptions)
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "HookPreReceive(last) failed: %v", extra.Error)
+ }
+ } else if lastline > 0 {
+ fmt.Fprintf(out, "\n")
+ }
+
+ fmt.Fprintf(out, "Checked %d references in total\n", total)
+ return nil
+}
+
+// runHookUpdate processes the update hook: https://git-scm.com/docs/githooks#update
+func runHookUpdate(c *cli.Context) error {
+ // If this is an internal push, don't do anything else
+ if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal {
+ return nil
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ if c.NArg() != 3 {
+ return nil
+ }
+ args := c.Args().Slice()
+
+ // The arguments given to the hook are in order: reference name, old commit ID and new commit ID.
+ refFullName := git.RefName(args[0])
+ newCommitID := args[2]
+
+ // Only process pull references.
+ if !refFullName.IsPull() {
+ return nil
+ }
+
+ // Empty new commit ID means deletion.
+ if git.IsEmptyCommitID(newCommitID, nil) {
+ return fail(ctx, fmt.Sprintf("The deletion of %s is skipped as it's an internal reference.", refFullName), "")
+ }
+
+ return nil
+}
+
+func runHookPostReceive(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+
+ // First of all run update-server-info no matter what
+ if _, _, err := git.NewCommand(ctx, "update-server-info").RunStdString(nil); err != nil {
+ return fmt.Errorf("Failed to call 'git update-server-info': %w", err)
+ }
+
+ // If this is an internal push, don't do anything else
+ if isInternal, _ := strconv.ParseBool(os.Getenv(repo_module.EnvIsInternal)); isInternal {
+ return nil
+ }
+
+ if len(os.Getenv("SSH_ORIGINAL_COMMAND")) == 0 {
+ if setting.OnlyAllowPushIfGiteaEnvironmentSet {
+ return fail(ctx, `Rejecting changes as Forgejo environment not set.
+If you are pushing over SSH you must push with a key managed by
+Forgejo or set your environment appropriately.`, "")
+ }
+ return nil
+ }
+
+ var out io.Writer
+ out = &nilWriter{}
+ if setting.Git.VerbosePush {
+ if setting.Git.VerbosePushDelay > 0 {
+ dWriter := newDelayWriter(os.Stdout, setting.Git.VerbosePushDelay)
+ defer dWriter.Close()
+ out = dWriter
+ } else {
+ out = os.Stdout
+ }
+ }
+
+ // the environment is set by serv command
+ repoUser := os.Getenv(repo_module.EnvRepoUsername)
+ isWiki, _ := strconv.ParseBool(os.Getenv(repo_module.EnvRepoIsWiki))
+ repoName := os.Getenv(repo_module.EnvRepoName)
+ pusherID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64)
+ prID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPRID), 10, 64)
+ pusherName := os.Getenv(repo_module.EnvPusherName)
+
+ hookOptions := private.HookOptions{
+ UserName: pusherName,
+ UserID: pusherID,
+ GitAlternativeObjectDirectories: os.Getenv(private.GitAlternativeObjectDirectories),
+ GitObjectDirectory: os.Getenv(private.GitObjectDirectory),
+ GitQuarantinePath: os.Getenv(private.GitQuarantinePath),
+ GitPushOptions: pushoptions.New().ReadEnv().Map(),
+ PullRequestID: prID,
+ PushTrigger: repo_module.PushTrigger(os.Getenv(repo_module.EnvPushTrigger)),
+ }
+ oldCommitIDs := make([]string, hookBatchSize)
+ newCommitIDs := make([]string, hookBatchSize)
+ refFullNames := make([]git.RefName, hookBatchSize)
+ count := 0
+ total := 0
+ wasEmpty := false
+ masterPushed := false
+ results := make([]private.HookPostReceiveBranchResult, 0)
+
+ scanner := bufio.NewScanner(os.Stdin)
+ for scanner.Scan() {
+ // TODO: support news feeds for wiki
+ if isWiki {
+ continue
+ }
+
+ fields := bytes.Fields(scanner.Bytes())
+ if len(fields) != 3 {
+ continue
+ }
+
+ fmt.Fprintf(out, ".")
+ oldCommitIDs[count] = string(fields[0])
+ newCommitIDs[count] = string(fields[1])
+ refFullNames[count] = git.RefName(fields[2])
+
+ if refFullNames[count] == git.BranchPrefix+"master" && !git.IsEmptyCommitID(newCommitIDs[count], nil) && count == total {
+ masterPushed = true
+ }
+ count++
+ total++
+
+ if count >= hookBatchSize {
+ fmt.Fprintf(out, " Processing %d references\n", count)
+ hookOptions.OldCommitIDs = oldCommitIDs
+ hookOptions.NewCommitIDs = newCommitIDs
+ hookOptions.RefFullNames = refFullNames
+ resp, extra := private.HookPostReceive(ctx, repoUser, repoName, hookOptions)
+ if extra.HasError() {
+ hookPrintResults(results)
+ return fail(ctx, extra.UserMsg, "HookPostReceive failed: %v", extra.Error)
+ }
+ wasEmpty = wasEmpty || resp.RepoWasEmpty
+ results = append(results, resp.Results...)
+ count = 0
+ }
+ }
+
+ if count == 0 {
+ if wasEmpty && masterPushed {
+ // We need to tell the repo to reset the default branch to master
+ extra := private.SetDefaultBranch(ctx, repoUser, repoName, "master")
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "SetDefaultBranch failed: %v", extra.Error)
+ }
+ }
+ fmt.Fprintf(out, "Processed %d references in total\n", total)
+
+ hookPrintResults(results)
+ return nil
+ }
+
+ hookOptions.OldCommitIDs = oldCommitIDs[:count]
+ hookOptions.NewCommitIDs = newCommitIDs[:count]
+ hookOptions.RefFullNames = refFullNames[:count]
+
+ fmt.Fprintf(out, " Processing %d references\n", count)
+
+ resp, extra := private.HookPostReceive(ctx, repoUser, repoName, hookOptions)
+ if resp == nil {
+ hookPrintResults(results)
+ return fail(ctx, extra.UserMsg, "HookPostReceive failed: %v", extra.Error)
+ }
+ wasEmpty = wasEmpty || resp.RepoWasEmpty
+ results = append(results, resp.Results...)
+
+ fmt.Fprintf(out, "Processed %d references in total\n", total)
+
+ if wasEmpty && masterPushed {
+ // We need to tell the repo to reset the default branch to master
+ extra := private.SetDefaultBranch(ctx, repoUser, repoName, "master")
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "SetDefaultBranch failed: %v", extra.Error)
+ }
+ }
+
+ hookPrintResults(results)
+ return nil
+}
+
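+// hookPrintResults prints to stderr the links for creating or visiting the
+// pull requests associated with the pushed branches.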
+func hookPrintResults(results []private.HookPostReceiveBranchResult) {
+ for _, res := range results {
+ if !res.Message {
+ continue
+ }
+
+ fmt.Fprintln(os.Stderr, "")
+ if res.Create {
+ fmt.Fprintf(os.Stderr, "Create a new pull request for '%s':\n", res.Branch)
+ fmt.Fprintf(os.Stderr, " %s\n", res.URL)
+ } else {
+ fmt.Fprint(os.Stderr, "Visit the existing pull request:\n")
+ fmt.Fprintf(os.Stderr, " %s\n", res.URL)
+ }
+ fmt.Fprintln(os.Stderr, "")
+ _ = os.Stderr.Sync()
+ }
+}
+
+func runHookProcReceive(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+
+ if len(os.Getenv("SSH_ORIGINAL_COMMAND")) == 0 {
+ if setting.OnlyAllowPushIfGiteaEnvironmentSet {
+ return fail(ctx, `Rejecting changes as Forgejo environment not set.
+If you are pushing over SSH you must push with a key managed by
+Forgejo or set your environment appropriately.`, "")
+ }
+ return nil
+ }
+
+ if git.CheckGitVersionAtLeast("2.29") != nil {
+ return fail(ctx, "No proc-receive support", "current git version doesn't support proc-receive.")
+ }
+
+ reader := bufio.NewReader(os.Stdin)
+ repoUser := os.Getenv(repo_module.EnvRepoUsername)
+ repoName := os.Getenv(repo_module.EnvRepoName)
+ pusherID, _ := strconv.ParseInt(os.Getenv(repo_module.EnvPusherID), 10, 64)
+ pusherName := os.Getenv(repo_module.EnvPusherName)
+
+ // 1. Version and features negotiation.
+ // S: PKT-LINE(version=1\0push-options atomic...) / PKT-LINE(version=1\n)
+ // S: flush-pkt
+ // H: PKT-LINE(version=1\0push-options...)
+ // H: flush-pkt
+
+ rs, err := readPktLine(ctx, reader, pktLineTypeData)
+ if err != nil {
+ return err
+ }
+
+ const VersionHead string = "version=1"
+
+ var (
+ hasPushOptions bool
+ response = []byte(VersionHead)
+ requestOptions []string
+ )
+
+ index := bytes.IndexByte(rs.Data, byte(0))
+ if index >= len(rs.Data) {
+ return fail(ctx, "Protocol: format error", "pkt-line: format error %s", rs.Data)
+ }
+
+ if index < 0 {
+ if len(rs.Data) == 10 && rs.Data[9] == '\n' {
+ index = 9
+ } else {
+ return fail(ctx, "Protocol: format error", "pkt-line: format error %s", rs.Data)
+ }
+ }
+
+ if string(rs.Data[0:index]) != VersionHead {
+ return fail(ctx, "Protocol: version error", "Received unsupported version: %s", string(rs.Data[0:index]))
+ }
+ requestOptions = strings.Split(string(rs.Data[index+1:]), " ")
+
+ for _, option := range requestOptions {
+ if strings.HasPrefix(option, "push-options") {
+ response = append(response, byte(0))
+ response = append(response, []byte("push-options")...)
+ hasPushOptions = true
+ }
+ }
+ response = append(response, '\n')
+
+ _, err = readPktLine(ctx, reader, pktLineTypeFlush)
+ if err != nil {
+ return err
+ }
+
+ err = writeDataPktLine(ctx, os.Stdout, response)
+ if err != nil {
+ return err
+ }
+
+ err = writeFlushPktLine(ctx, os.Stdout)
+ if err != nil {
+ return err
+ }
+
+ // 2. receive commands from server.
+ // S: PKT-LINE(<old-oid> <new-oid> <ref>)
+ // S: ... ...
+ // S: flush-pkt
+ // # [receive push-options]
+ // S: PKT-LINE(push-option)
+ // S: ... ...
+ // S: flush-pkt
+ hookOptions := private.HookOptions{
+ UserName: pusherName,
+ UserID: pusherID,
+ }
+ hookOptions.OldCommitIDs = make([]string, 0, hookBatchSize)
+ hookOptions.NewCommitIDs = make([]string, 0, hookBatchSize)
+ hookOptions.RefFullNames = make([]git.RefName, 0, hookBatchSize)
+
+ for {
+ // note: pktLineTypeUnknown means both pktLineTypeFlush and pktLineTypeData are allowed
+ rs, err = readPktLine(ctx, reader, pktLineTypeUnknown)
+ if err != nil {
+ return err
+ }
+
+ if rs.Type == pktLineTypeFlush {
+ break
+ }
+ t := strings.SplitN(string(rs.Data), " ", 3)
+ if len(t) != 3 {
+ continue
+ }
+ hookOptions.OldCommitIDs = append(hookOptions.OldCommitIDs, t[0])
+ hookOptions.NewCommitIDs = append(hookOptions.NewCommitIDs, t[1])
+ hookOptions.RefFullNames = append(hookOptions.RefFullNames, git.RefName(t[2]))
+ }
+
+ hookOptions.GitPushOptions = make(map[string]string)
+
+ if hasPushOptions {
+ pushOptions := pushoptions.NewFromMap(&hookOptions.GitPushOptions)
+ for {
+ rs, err = readPktLine(ctx, reader, pktLineTypeUnknown)
+ if err != nil {
+ return err
+ }
+
+ if rs.Type == pktLineTypeFlush {
+ break
+ }
+ pushOptions.Parse(string(rs.Data))
+ }
+ }
+
+ // 3. run hook
+ resp, extra := private.HookProcReceive(ctx, repoUser, repoName, hookOptions)
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "HookProcReceive failed: %v", extra.Error)
+ }
+
+ // 4. respond with the results to the server
+ // # a. OK, but has an alternate reference. The alternate reference name
+ // # and other status can be given in option directives.
+ // H: PKT-LINE(ok <ref>)
+ // H: PKT-LINE(option refname <refname>)
+ // H: PKT-LINE(option old-oid <old-oid>)
+ // H: PKT-LINE(option new-oid <new-oid>)
+ // H: PKT-LINE(option forced-update)
+ // H: ... ...
+ // H: flush-pkt
+ // # b. NO, I reject it.
+ // H: PKT-LINE(ng <ref> <reason>)
+ // # c. Fall through, let 'receive-pack' to execute it.
+ // H: PKT-LINE(ok <ref>)
+ // H: PKT-LINE(option fall-through)
+
+ for _, rs := range resp.Results {
+ if len(rs.Err) > 0 {
+ err = writeDataPktLine(ctx, os.Stdout, []byte("ng "+rs.OriginalRef.String()+" "+rs.Err))
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ if rs.IsNotMatched {
+ err = writeDataPktLine(ctx, os.Stdout, []byte("ok "+rs.OriginalRef.String()))
+ if err != nil {
+ return err
+ }
+ err = writeDataPktLine(ctx, os.Stdout, []byte("option fall-through"))
+ if err != nil {
+ return err
+ }
+ continue
+ }
+
+ err = writeDataPktLine(ctx, os.Stdout, []byte("ok "+rs.OriginalRef))
+ if err != nil {
+ return err
+ }
+ err = writeDataPktLine(ctx, os.Stdout, []byte("option refname "+rs.Ref))
+ if err != nil {
+ return err
+ }
+ if !git.IsEmptyCommitID(rs.OldOID, nil) {
+ err = writeDataPktLine(ctx, os.Stdout, []byte("option old-oid "+rs.OldOID))
+ if err != nil {
+ return err
+ }
+ }
+ err = writeDataPktLine(ctx, os.Stdout, []byte("option new-oid "+rs.NewOID))
+ if err != nil {
+ return err
+ }
+ if rs.IsForcePush {
+ err = writeDataPktLine(ctx, os.Stdout, []byte("option forced-update"))
+ if err != nil {
+ return err
+ }
+ }
+ }
+ err = writeFlushPktLine(ctx, os.Stdout)
+
+ return err
+}
+
+// git PKT-Line api
+// pktLineType message type of pkt-line
+type pktLineType int64
+
+const (
+ // Unknown type
+ pktLineTypeUnknown pktLineType = 0
+ // flush-pkt "0000"
+ pktLineTypeFlush pktLineType = iota
+ // data line
+ pktLineTypeData
+)
+
+// gitPktLine pkt-line api
+type gitPktLine struct {
+ Type pktLineType
+ Length uint64
+ Data []byte
+}
+
+// readPktLine reads a pkt-line from `in`. If requestType is not pktLineTypeUnknown,
+// an error is returned when the line read does not match the requested type.
+func readPktLine(ctx context.Context, in *bufio.Reader, requestType pktLineType) (*gitPktLine, error) {
+ // Read length prefix
+ lengthBytes := make([]byte, 4)
+ if _, err := io.ReadFull(in, lengthBytes); err != nil {
+ return nil, fail(ctx, "Protocol: stdin error", "Pkt-Line: read stdin failed : %v", err)
+ }
+
+ var err error
+ r := &gitPktLine{}
+ r.Length, err = strconv.ParseUint(string(lengthBytes), 16, 32)
+ if err != nil {
+ return nil, fail(ctx, "Protocol: format parse error", "Pkt-Line format is wrong :%v", err)
+ }
+
+ if r.Length == 0 {
+ if requestType == pktLineTypeData {
+ return nil, fail(ctx, "Protocol: format data error", "Pkt-Line format is wrong")
+ }
+ r.Type = pktLineTypeFlush
+ return r, nil
+ }
+
+ if r.Length <= 4 || r.Length > 65520 || requestType == pktLineTypeFlush {
+ return nil, fail(ctx, "Protocol: format length error", "Pkt-Line format is wrong")
+ }
+
+ r.Data = make([]byte, r.Length-4)
+ if n, err := io.ReadFull(in, r.Data); uint64(n) != r.Length-4 || err != nil {
+ return nil, fail(ctx, "Protocol: stdin error", "Pkt-Line: read stdin failed : %v", err)
+ }
+
+ r.Type = pktLineTypeData
+
+ return r, nil
+}
+
+func writeFlushPktLine(ctx context.Context, out io.Writer) error {
+ l, err := out.Write([]byte("0000"))
+ if err != nil || l != 4 {
+ return fail(ctx, "Protocol: write error", "Pkt-Line response failed: %v", err)
+ }
+ return nil
+}
+
+// writeDataPktLine writes a data pkt-line based on `data` to `out` according to the specification.
+// https://git-scm.com/docs/protocol-common
+func writeDataPktLine(ctx context.Context, out io.Writer, data []byte) error {
+ // Implementations SHOULD NOT send an empty pkt-line ("0004").
+ if len(data) == 0 {
+ return fail(ctx, "Protocol: write error", "Not allowed to write empty Pkt-Line")
+ }
+
+ length := uint64(len(data) + 4)
+
+ // The maximum length of a pkt-line’s data component is 65516 bytes.
+ // Implementations MUST NOT send pkt-line whose length exceeds 65520 (65516 bytes of payload + 4 bytes of length data).
+ if length > 65520 {
+ return fail(ctx, "Protocol: write error", "Pkt-Line exceeds maximum of 65520 bytes")
+ }
+
+ lr, err := fmt.Fprintf(out, "%04x", length)
+ if err != nil || lr != 4 {
+ return fail(ctx, "Protocol: write error", "Pkt-Line response failed: %v", err)
+ }
+
+ lr, err = out.Write(data)
+ if err != nil || int(length-4) != lr {
+ return fail(ctx, "Protocol: write error", "Pkt-Line response failed: %v", err)
+ }
+
+ return nil
+}
diff --git a/cmd/hook_test.go b/cmd/hook_test.go
new file mode 100644
index 0000000..514eb91
--- /dev/null
+++ b/cmd/hook_test.go
@@ -0,0 +1,196 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "bufio"
+ "bytes"
+ "context"
+ "io"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "strings"
+ "testing"
+ "time"
+
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/urfave/cli/v2"
+)
+
+// Capture what's being written into a standard file descriptor.
+func captureOutput(t *testing.T, stdFD *os.File) (finish func() (output string)) {
+ t.Helper()
+
+ r, w, err := os.Pipe()
+ require.NoError(t, err)
+ resetStdout := test.MockVariableValue(stdFD, *w)
+
+ return func() (output string) {
+ w.Close()
+ resetStdout()
+
+ out, err := io.ReadAll(r)
+ require.NoError(t, err)
+ return string(out)
+ }
+}
+
+func TestPktLine(t *testing.T) {
+ ctx := context.Background()
+
+ t.Run("Read", func(t *testing.T) {
+ s := strings.NewReader("0000")
+ r := bufio.NewReader(s)
+ result, err := readPktLine(ctx, r, pktLineTypeFlush)
+ require.NoError(t, err)
+ assert.Equal(t, pktLineTypeFlush, result.Type)
+
+ s = strings.NewReader("0006a\n")
+ r = bufio.NewReader(s)
+ result, err = readPktLine(ctx, r, pktLineTypeData)
+ require.NoError(t, err)
+ assert.Equal(t, pktLineTypeData, result.Type)
+ assert.Equal(t, []byte("a\n"), result.Data)
+
+ s = strings.NewReader("0004")
+ r = bufio.NewReader(s)
+ result, err = readPktLine(ctx, r, pktLineTypeData)
+ require.Error(t, err)
+ assert.Nil(t, result)
+
+ data := strings.Repeat("x", 65516)
+ r = bufio.NewReader(strings.NewReader("fff0" + data))
+ result, err = readPktLine(ctx, r, pktLineTypeData)
+ require.NoError(t, err)
+ assert.Equal(t, pktLineTypeData, result.Type)
+ assert.Equal(t, []byte(data), result.Data)
+
+ r = bufio.NewReader(strings.NewReader("fff1a"))
+ result, err = readPktLine(ctx, r, pktLineTypeData)
+ require.Error(t, err)
+ assert.Nil(t, result)
+ })
+
+ t.Run("Write", func(t *testing.T) {
+ w := bytes.NewBuffer([]byte{})
+ err := writeFlushPktLine(ctx, w)
+ require.NoError(t, err)
+ assert.Equal(t, []byte("0000"), w.Bytes())
+
+ w.Reset()
+ err = writeDataPktLine(ctx, w, []byte("a\nb"))
+ require.NoError(t, err)
+ assert.Equal(t, []byte("0007a\nb"), w.Bytes())
+
+ w.Reset()
+ data := bytes.Repeat([]byte{0x05}, 288)
+ err = writeDataPktLine(ctx, w, data)
+ require.NoError(t, err)
+ assert.Equal(t, append([]byte("0124"), data...), w.Bytes())
+
+ w.Reset()
+ err = writeDataPktLine(ctx, w, nil)
+ require.Error(t, err)
+ assert.Empty(t, w.Bytes())
+
+ w.Reset()
+ data = bytes.Repeat([]byte{0x64}, 65516)
+ err = writeDataPktLine(ctx, w, data)
+ require.NoError(t, err)
+ assert.Equal(t, append([]byte("fff0"), data...), w.Bytes())
+
+ w.Reset()
+ err = writeDataPktLine(ctx, w, bytes.Repeat([]byte{0x64}, 65516+1))
+ require.Error(t, err)
+ assert.Empty(t, w.Bytes())
+ })
+}
+
+func TestDelayWriter(t *testing.T) {
+ // Setup the environment.
+ defer test.MockVariableValue(&setting.InternalToken, "Random")()
+ defer test.MockVariableValue(&setting.InstallLock, true)()
+ defer test.MockVariableValue(&setting.Git.VerbosePush, true)()
+ t.Setenv("SSH_ORIGINAL_COMMAND", "true")
+
+ // Setup the Stdin.
+ f, err := os.OpenFile(t.TempDir()+"/stdin", os.O_RDWR|os.O_CREATE|os.O_EXCL, 0o666)
+ require.NoError(t, err)
+ _, err = f.Write([]byte("00000000000000000000 00000000000000000001 refs/head/main\n"))
+ require.NoError(t, err)
+ _, err = f.Seek(0, 0)
+ require.NoError(t, err)
+ defer test.MockVariableValue(os.Stdin, *f)()
+
+ // Setup the server that processes the hooks.
+ ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ time.Sleep(time.Millisecond * 600)
+ }))
+ defer ts.Close()
+ defer test.MockVariableValue(&setting.LocalURL, ts.URL+"/")()
+
+ app := cli.NewApp()
+ app.Commands = []*cli.Command{subcmdHookPreReceive}
+
+ t.Run("Should delay", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Git.VerbosePushDelay, time.Millisecond*500)()
+ finish := captureOutput(t, os.Stdout)
+
+ err = app.Run([]string{"./forgejo", "pre-receive"})
+ require.NoError(t, err)
+ out := finish()
+
+ require.Contains(t, out, "* Checking 1 references")
+ require.Contains(t, out, "Checked 1 references in total")
+ })
+
+ t.Run("Shouldn't delay", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Git.VerbosePushDelay, time.Second*5)()
+ finish := captureOutput(t, os.Stdout)
+
+ err = app.Run([]string{"./forgejo", "pre-receive"})
+ require.NoError(t, err)
+ out := finish()
+
+ require.NoError(t, err)
+ require.Empty(t, out)
+ })
+}
+
+func TestRunHookUpdate(t *testing.T) {
+ app := cli.NewApp()
+ app.Commands = []*cli.Command{subcmdHookUpdate}
+
+ t.Run("Removal of internal reference", func(t *testing.T) {
+ defer test.MockVariableValue(&cli.OsExiter, func(code int) {})()
+ defer test.MockVariableValue(&setting.IsProd, false)()
+ finish := captureOutput(t, os.Stderr)
+
+ err := app.Run([]string{"./forgejo", "update", "refs/pull/1/head", "0a51ae26bc73c47e2f754560c40904cf14ed51a9", "0000000000000000000000000000000000000000"})
+ out := finish()
+ require.Error(t, err)
+
+ assert.Contains(t, out, "The deletion of refs/pull/1/head is skipped as it's an internal reference.")
+ })
+
+ t.Run("Update of internal reference", func(t *testing.T) {
+ err := app.Run([]string{"./forgejo", "update", "refs/pull/1/head", "0a51ae26bc73c47e2f754560c40904cf14ed51a9", "0000000000000000000000000000000000000001"})
+ require.NoError(t, err)
+ })
+
+ t.Run("Removal of branch", func(t *testing.T) {
+ err := app.Run([]string{"./forgejo", "update", "refs/head/main", "0a51ae26bc73c47e2f754560c40904cf14ed51a9", "0000000000000000000000000000000000000000"})
+ require.NoError(t, err)
+ })
+
+ t.Run("Not enough arguments", func(t *testing.T) {
+ err := app.Run([]string{"./forgejo", "update"})
+ require.NoError(t, err)
+ })
+}
diff --git a/cmd/keys.go b/cmd/keys.go
new file mode 100644
index 0000000..81425a5
--- /dev/null
+++ b/cmd/keys.go
@@ -0,0 +1,83 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "strings"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/private"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdKeys represents the available keys sub-command
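+// For illustration only (the binary path depends on the local setup), an sshd
+// AuthorizedKeysCommand typically invokes it as:
+//
+//	forgejo keys -e git -u %u -t %t -k %k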
+var CmdKeys = &cli.Command{
+ Name: "keys",
+ Usage: "(internal) Should only be called by SSH server",
+ Description: "Queries the Forgejo database to get the authorized command for a given ssh key fingerprint",
+ Before: PrepareConsoleLoggerLevel(log.FATAL),
+ Action: runKeys,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "expected",
+ Aliases: []string{"e"},
+ Value: "git",
+ Usage: "Expected user for whom provide key commands",
+ },
+ &cli.StringFlag{
+ Name: "username",
+ Aliases: []string{"u"},
+ Value: "",
+ Usage: "Username trying to log in by SSH",
+ },
+ &cli.StringFlag{
+ Name: "type",
+ Aliases: []string{"t"},
+ Value: "",
+ Usage: "Type of the SSH key provided to the SSH Server (requires content to be provided too)",
+ },
+ &cli.StringFlag{
+ Name: "content",
+ Aliases: []string{"k"},
+ Value: "",
+ Usage: "Base64 encoded content of the SSH key provided to the SSH Server (requires type to be provided too)",
+ },
+ },
+}
+
+func runKeys(c *cli.Context) error {
+ if !c.IsSet("username") {
+ return errors.New("No username provided")
+ }
+ // Check username matches the expected username
+ if strings.TrimSpace(c.String("username")) != strings.TrimSpace(c.String("expected")) {
+ return nil
+ }
+
+ content := ""
+
+ if c.IsSet("type") && c.IsSet("content") {
+ content = fmt.Sprintf("%s %s", strings.TrimSpace(c.String("type")), strings.TrimSpace(c.String("content")))
+ }
+
+ if content == "" {
+ return errors.New("No key type and content provided")
+ }
+
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+
+ authorizedString, extra := private.AuthorizedPublicKeyByContent(ctx, content)
+ // do not use handleCliResponseExtra or cli.NewExitError: if it exits immediately, it breaks some tests like Test_CmdKeys
+ if extra.Error != nil {
+ return extra.Error
+ }
+ _, _ = fmt.Fprintln(c.App.Writer, strings.TrimSpace(authorizedString.Text))
+ return nil
+}
diff --git a/cmd/mailer.go b/cmd/mailer.go
new file mode 100644
index 0000000..0c5f2c8
--- /dev/null
+++ b/cmd/mailer.go
@@ -0,0 +1,50 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+func runSendMail(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setting.MustInstalled()
+
+ if err := argsSet(c, "title"); err != nil {
+ return err
+ }
+
+ subject := c.String("title")
+ confirmSkipped := c.Bool("force")
+ body := c.String("content")
+
+ if !confirmSkipped {
+ if len(body) == 0 {
+ fmt.Println("warning: Content is empty")
+ }
+
+ fmt.Print("Proceed with sending email? [Y/n] ")
+ isConfirmed, err := confirm()
+ if err != nil {
+ return err
+ } else if !isConfirmed {
+ fmt.Println("The mail was not sent")
+ return nil
+ }
+ }
+
+ respText, extra := private.SendEmail(ctx, subject, body, nil)
+ if extra.HasError() {
+ return handleCliResponseExtra(extra)
+ }
+ _, _ = fmt.Printf("Sent %s email(s) to all users\n", respText.Text)
+ return nil
+}
diff --git a/cmd/main.go b/cmd/main.go
new file mode 100644
index 0000000..b48a614
--- /dev/null
+++ b/cmd/main.go
@@ -0,0 +1,225 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "code.gitea.io/gitea/cmd/forgejo"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+// cmdHelp is our own help subcommand with more information
+// Keep in mind that "./gitea help" (subcommand) is different from "./gitea --help" (flag): the flag doesn't parse the config or output the "DEFAULT CONFIGURATION:" information
+func cmdHelp() *cli.Command {
+ c := &cli.Command{
+ Name: "help",
+ Aliases: []string{"h"},
+ Usage: "Shows a list of commands or help for one command",
+ ArgsUsage: "[command]",
+ Action: func(c *cli.Context) (err error) {
+ lineage := c.Lineage() // The order is from child to parent: help, doctor, Gitea, {Command:nil}
+ targetCmdIdx := 0
+ if c.Command.Name == "help" {
+ targetCmdIdx = 1
+ }
+ if lineage[targetCmdIdx+1].Command != nil {
+ err = cli.ShowCommandHelp(lineage[targetCmdIdx+1], lineage[targetCmdIdx].Command.Name)
+ } else {
+ err = cli.ShowAppHelp(c)
+ }
+ _, _ = fmt.Fprintf(c.App.Writer, `
+DEFAULT CONFIGURATION:
+ AppPath: %s
+ WorkPath: %s
+ CustomPath: %s
+ ConfigFile: %s
+
+`, setting.AppPath, setting.AppWorkPath, setting.CustomPath, setting.CustomConf)
+ return err
+ },
+ }
+ return c
+}
+
+func appGlobalFlags() []cli.Flag {
+ return []cli.Flag{
+ // make the builtin flags at the top
+ cli.HelpFlag,
+
+ // shared configuration flags, they are for global and for each sub-command at the same time
+ // e.g. "./gitea --config /tmp/app.ini web --config /tmp/app.ini" is valid, although discouraged
+ // keep in mind that short flags like "-C", "-c" and "-w" are claimed globally, so they can't be reused by sub-commands
+ &cli.StringFlag{
+ Name: "custom-path",
+ Aliases: []string{"C"},
+ Usage: "Set custom path (defaults to '{WorkPath}/custom')",
+ },
+ &cli.StringFlag{
+ Name: "config",
+ Aliases: []string{"c"},
+ Value: setting.CustomConf,
+ Usage: "Set custom config file (defaults to '{WorkPath}/custom/conf/app.ini')",
+ },
+ &cli.StringFlag{
+ Name: "work-path",
+ Aliases: []string{"w"},
+ Usage: "Set Forgejo's working path (defaults to the directory of the Forgejo binary)",
+ },
+ }
+}
+
+func prepareSubcommandWithConfig(command *cli.Command, globalFlags []cli.Flag) {
+ command.Flags = append(append([]cli.Flag{}, globalFlags...), command.Flags...)
+ command.Action = prepareWorkPathAndCustomConf(command.Action)
+ command.HideHelp = true
+ if command.Name != "help" {
+ command.Subcommands = append(command.Subcommands, cmdHelp())
+ }
+ for i := range command.Subcommands {
+ prepareSubcommandWithConfig(command.Subcommands[i], globalFlags)
+ }
+}
+
+// prepareWorkPathAndCustomConf wraps the Action to prepare the work path and custom config
+// It can't use "Before", because each level's sub-command's Before will be called one by one, so the "init" would be done multiple times
+func prepareWorkPathAndCustomConf(action cli.ActionFunc) func(ctx *cli.Context) error {
+ return func(ctx *cli.Context) error {
+ var args setting.ArgWorkPathAndCustomConf
+ // from children to parent, check the global flags
+ for _, curCtx := range ctx.Lineage() {
+ if curCtx.IsSet("work-path") && args.WorkPath == "" {
+ args.WorkPath = curCtx.String("work-path")
+ }
+ if curCtx.IsSet("custom-path") && args.CustomPath == "" {
+ args.CustomPath = curCtx.String("custom-path")
+ }
+ if curCtx.IsSet("config") && args.CustomConf == "" {
+ args.CustomConf = curCtx.String("config")
+ }
+ }
+ setting.InitWorkPathAndCommonConfig(os.Getenv, args)
+ if ctx.Bool("help") || action == nil {
+ // the default behavior of "urfave/cli": "nil action" means "show help"
+ return cmdHelp().Action(ctx)
+ }
+ return action(ctx)
+ }
+}
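+// For illustration (the path is a placeholder): "./forgejo --config /etc/forgejo/app.ini web"
+// and "./forgejo web --config /etc/forgejo/app.ini" resolve the same config file, because the
+// flag lineage is walked from the sub-command up to the global scope.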
+
+func NewMainApp(version, versionExtra string) *cli.App {
+ path, err := os.Executable()
+ if err != nil {
+ panic(err)
+ }
+ executable := filepath.Base(path)
+
+ var subCmdsStandalone []*cli.Command = make([]*cli.Command, 0, 10)
+ var subCmdWithConfig []*cli.Command = make([]*cli.Command, 0, 10)
+ var globalFlags []cli.Flag = make([]cli.Flag, 0, 10)
+
+ //
+ // If the executable is forgejo-cli, provide a Forgejo specific CLI
+ // that is NOT compatible with Gitea.
+ //
+ if executable == "forgejo-cli" {
+ subCmdsStandalone = append(subCmdsStandalone, forgejo.CmdActions(context.Background()))
+ subCmdWithConfig = append(subCmdWithConfig, forgejo.CmdF3(context.Background()))
+ globalFlags = append(globalFlags, []cli.Flag{
+ &cli.BoolFlag{
+ Name: "quiet",
+ },
+ &cli.BoolFlag{
+ Name: "verbose",
+ },
+ }...)
+ } else {
+ //
+ // Otherwise provide a Gitea-compatible CLI which includes the Forgejo-specific
+ // additions under the forgejo-cli subcommand. It allows admins to migrate from
+ // Gitea to Forgejo by replacing the gitea binary, optionally renaming it to
+ // forgejo if they want.
+ //
+ subCmdsStandalone = append(subCmdsStandalone, forgejo.CmdForgejo(context.Background()))
+ subCmdWithConfig = append(subCmdWithConfig, CmdActions)
+ }
+
+ return innerNewMainApp(version, versionExtra, subCmdsStandalone, subCmdWithConfig, globalFlags)
+}
+
+func innerNewMainApp(version, versionExtra string, subCmdsStandaloneArgs, subCmdWithConfigArgs []*cli.Command, globalFlagsArgs []cli.Flag) *cli.App {
+ app := cli.NewApp()
+ app.HelpName = "forgejo"
+ app.Name = "Forgejo"
+ app.Usage = "Beyond coding. We forge."
+ app.Description = `By default, forgejo starts the web server when run without arguments, which is the same as running the subcommand "web" explicitly.`
+ app.Version = version + versionExtra
+ app.EnableBashCompletion = true
+
+ // these sub-commands need to use config file
+ subCmdWithConfig := []*cli.Command{
+ cmdHelp(), // the "help" sub-command was used to show the more information for "work path" and "custom config"
+ CmdWeb,
+ CmdServ,
+ CmdHook,
+ CmdKeys,
+ CmdDump,
+ CmdAdmin,
+ CmdMigrate,
+ CmdDoctor,
+ CmdManager,
+ CmdEmbedded,
+ CmdMigrateStorage,
+ CmdDumpRepository,
+ CmdRestoreRepository,
+ }
+
+ subCmdWithConfig = append(subCmdWithConfig, subCmdWithConfigArgs...)
+
+ // these sub-commands do not need the config file, and they do not depend on any path or environment variable.
+ subCmdStandalone := []*cli.Command{
+ CmdCert,
+ CmdGenerate,
+ CmdDocs,
+ }
+ subCmdStandalone = append(subCmdStandalone, subCmdsStandaloneArgs...)
+
+ app.DefaultCommand = CmdWeb.Name
+
+ globalFlags := appGlobalFlags()
+ globalFlags = append(globalFlags, globalFlagsArgs...)
+ app.Flags = append(app.Flags, cli.VersionFlag)
+ app.Flags = append(app.Flags, globalFlags...)
+ app.HideHelp = true // use our own help action to show help (with more information like the default config)
+ app.Before = PrepareConsoleLoggerLevel(log.INFO)
+ for i := range subCmdWithConfig {
+ prepareSubcommandWithConfig(subCmdWithConfig[i], globalFlags)
+ }
+ app.Commands = append(app.Commands, subCmdWithConfig...)
+ app.Commands = append(app.Commands, subCmdStandalone...)
+
+ return app
+}
+
+func RunMainApp(app *cli.App, args ...string) error {
+ err := app.Run(args)
+ if err == nil {
+ return nil
+ }
+ if strings.HasPrefix(err.Error(), "flag provided but not defined:") {
+ // the cli package should already have output the error message, so just exit
+ cli.OsExiter(1)
+ return err
+ }
+ _, _ = fmt.Fprintf(app.ErrWriter, "Command error: %v\n", err)
+ cli.OsExiter(1)
+ return err
+}
diff --git a/cmd/main_test.go b/cmd/main_test.go
new file mode 100644
index 0000000..432f2b9
--- /dev/null
+++ b/cmd/main_test.go
@@ -0,0 +1,179 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "fmt"
+ "io"
+ "os"
+ "path/filepath"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/models/unittest"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/test"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+ "github.com/urfave/cli/v2"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m)
+}
+
+func makePathOutput(workPath, customPath, customConf string) string {
+ return fmt.Sprintf("WorkPath=%s\nCustomPath=%s\nCustomConf=%s", workPath, customPath, customConf)
+}
+
+func newTestApp(testCmdAction func(ctx *cli.Context) error) *cli.App {
+ app := NewMainApp("version", "version-extra")
+ testCmd := &cli.Command{Name: "test-cmd", Action: testCmdAction}
+ prepareSubcommandWithConfig(testCmd, appGlobalFlags())
+ app.Commands = append(app.Commands, testCmd)
+ app.DefaultCommand = testCmd.Name
+ return app
+}
+
+type runResult struct {
+ Stdout string
+ Stderr string
+ ExitCode int
+}
+
+func runTestApp(app *cli.App, args ...string) (runResult, error) {
+ outBuf := new(strings.Builder)
+ errBuf := new(strings.Builder)
+ app.Writer = outBuf
+ app.ErrWriter = errBuf
+ exitCode := -1
+ defer test.MockVariableValue(&cli.ErrWriter, app.ErrWriter)()
+ defer test.MockVariableValue(&cli.OsExiter, func(code int) {
+ if exitCode == -1 {
+ exitCode = code // save the exit code once and then reset the writer (to simulate the exit)
+ app.Writer, app.ErrWriter, cli.ErrWriter = io.Discard, io.Discard, io.Discard
+ }
+ })()
+ err := RunMainApp(app, args...)
+ return runResult{outBuf.String(), errBuf.String(), exitCode}, err
+}
+
+func TestCliCmd(t *testing.T) {
+ defaultWorkPath := filepath.Dir(setting.AppPath)
+ defaultCustomPath := filepath.Join(defaultWorkPath, "custom")
+ defaultCustomConf := filepath.Join(defaultCustomPath, "conf/app.ini")
+
+ cli.CommandHelpTemplate = "(command help template)"
+ cli.AppHelpTemplate = "(app help template)"
+ cli.SubcommandHelpTemplate = "(subcommand help template)"
+
+ cases := []struct {
+ env map[string]string
+ cmd string
+ exp string
+ }{
+ // main command help
+ {
+ cmd: "./gitea help",
+ exp: "DEFAULT CONFIGURATION:",
+ },
+
+ // parse paths
+ {
+ cmd: "./gitea test-cmd",
+ exp: makePathOutput(defaultWorkPath, defaultCustomPath, defaultCustomConf),
+ },
+ {
+ cmd: "./gitea -c /tmp/app.ini test-cmd",
+ exp: makePathOutput(defaultWorkPath, defaultCustomPath, "/tmp/app.ini"),
+ },
+ {
+ cmd: "./gitea test-cmd -c /tmp/app.ini",
+ exp: makePathOutput(defaultWorkPath, defaultCustomPath, "/tmp/app.ini"),
+ },
+ {
+ env: map[string]string{"GITEA_WORK_DIR": "/tmp"},
+ cmd: "./gitea test-cmd",
+ exp: makePathOutput("/tmp", "/tmp/custom", "/tmp/custom/conf/app.ini"),
+ },
+ {
+ env: map[string]string{"GITEA_WORK_DIR": "/tmp"},
+ cmd: "./gitea test-cmd --work-path /tmp/other",
+ exp: makePathOutput("/tmp/other", "/tmp/other/custom", "/tmp/other/custom/conf/app.ini"),
+ },
+ {
+ env: map[string]string{"GITEA_WORK_DIR": "/tmp"},
+ cmd: "./gitea test-cmd --config /tmp/app-other.ini",
+ exp: makePathOutput("/tmp", "/tmp/custom", "/tmp/app-other.ini"),
+ },
+ }
+
+ app := newTestApp(func(ctx *cli.Context) error {
+ _, _ = fmt.Fprint(ctx.App.Writer, makePathOutput(setting.AppWorkPath, setting.CustomPath, setting.CustomConf))
+ return nil
+ })
+ var envBackup []string
+ for _, s := range os.Environ() {
+ if strings.HasPrefix(s, "GITEA_") && strings.Contains(s, "=") {
+ envBackup = append(envBackup, s)
+ }
+ }
+ clearGiteaEnv := func() {
+ for _, s := range os.Environ() {
+ if strings.HasPrefix(s, "GITEA_") {
+ _ = os.Unsetenv(s)
+ }
+ }
+ }
+ defer func() {
+ clearGiteaEnv()
+ for _, s := range envBackup {
+ k, v, _ := strings.Cut(s, "=")
+ _ = os.Setenv(k, v)
+ }
+ }()
+
+ for _, c := range cases {
+ clearGiteaEnv()
+ for k, v := range c.env {
+ _ = os.Setenv(k, v)
+ }
+ args := strings.Split(c.cmd, " ") // for test only, "split" is good enough
+ r, err := runTestApp(app, args...)
+ require.NoError(t, err, c.cmd)
+ assert.NotEmpty(t, c.exp, c.cmd)
+ assert.Contains(t, r.Stdout, c.exp, c.cmd)
+ }
+}
+
+func TestCliCmdError(t *testing.T) {
+ app := newTestApp(func(ctx *cli.Context) error { return fmt.Errorf("normal error") })
+ r, err := runTestApp(app, "./gitea", "test-cmd")
+ require.Error(t, err)
+ assert.Equal(t, 1, r.ExitCode)
+ assert.Equal(t, "", r.Stdout)
+ assert.Equal(t, "Command error: normal error\n", r.Stderr)
+
+ app = newTestApp(func(ctx *cli.Context) error { return cli.Exit("exit error", 2) })
+ r, err = runTestApp(app, "./gitea", "test-cmd")
+ require.Error(t, err)
+ assert.Equal(t, 2, r.ExitCode)
+ assert.Equal(t, "", r.Stdout)
+ assert.Equal(t, "exit error\n", r.Stderr)
+
+ app = newTestApp(func(ctx *cli.Context) error { return nil })
+ r, err = runTestApp(app, "./gitea", "test-cmd", "--no-such")
+ require.Error(t, err)
+ assert.Equal(t, 1, r.ExitCode)
+ assert.Equal(t, "Incorrect Usage: flag provided but not defined: -no-such\n\n", r.Stdout)
+ assert.Equal(t, "", r.Stderr) // the cli package's strange behavior, the error message is not in stderr ....
+
+ app = newTestApp(func(ctx *cli.Context) error { return nil })
+ r, err = runTestApp(app, "./gitea", "test-cmd")
+ require.NoError(t, err)
+ assert.Equal(t, -1, r.ExitCode) // the cli.OsExiter is not called
+ assert.Equal(t, "", r.Stdout)
+ assert.Equal(t, "", r.Stderr)
+}
diff --git a/cmd/manager.go b/cmd/manager.go
new file mode 100644
index 0000000..b74771e
--- /dev/null
+++ b/cmd/manager.go
@@ -0,0 +1,154 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "os"
+ "time"
+
+ "code.gitea.io/gitea/modules/private"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ // CmdManager represents the manager command
+ CmdManager = &cli.Command{
+ Name: "manager",
+ Usage: "Manage the running forgejo process",
+ Description: "This is a command for managing the running forgejo process",
+ Subcommands: []*cli.Command{
+ subcmdShutdown,
+ subcmdRestart,
+ subcmdReloadTemplates,
+ subcmdFlushQueues,
+ subcmdLogging,
+ subCmdProcesses,
+ },
+ }
+ subcmdShutdown = &cli.Command{
+ Name: "shutdown",
+ Usage: "Gracefully shutdown the running process",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runShutdown,
+ }
+ subcmdRestart = &cli.Command{
+ Name: "restart",
+ Usage: "Gracefully restart the running process - (not implemented for windows servers)",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runRestart,
+ }
+ subcmdReloadTemplates = &cli.Command{
+ Name: "reload-templates",
+ Usage: "Reload template files in the running process",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runReloadTemplates,
+ }
+ subcmdFlushQueues = &cli.Command{
+ Name: "flush-queues",
+ Usage: "Flush queues in the running process",
+ Action: runFlushQueues,
+ Flags: []cli.Flag{
+ &cli.DurationFlag{
+ Name: "timeout",
+ Value: 60 * time.Second,
+ Usage: "Timeout for the flushing process",
+ },
+ &cli.BoolFlag{
+ Name: "non-blocking",
+ Usage: "Set to true to not wait for flush to complete before returning",
+ },
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ }
+ subCmdProcesses = &cli.Command{
+ Name: "processes",
+ Usage: "Display running processes within the current process",
+ Action: runProcesses,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ &cli.BoolFlag{
+ Name: "flat",
+ Usage: "Show processes as flat table rather than as tree",
+ },
+ &cli.BoolFlag{
+ Name: "no-system",
+ Usage: "Do not show system processes",
+ },
+ &cli.BoolFlag{
+ Name: "stacktraces",
+ Usage: "Show stacktraces",
+ },
+ &cli.BoolFlag{
+ Name: "json",
+ Usage: "Output as json",
+ },
+ &cli.StringFlag{
+ Name: "cancel",
+ Usage: "Process PID to cancel. (Only available for non-system processes.)",
+ },
+ },
+ }
+)
+
+func runShutdown(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ extra := private.Shutdown(ctx)
+ return handleCliResponseExtra(extra)
+}
+
+func runRestart(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ extra := private.Restart(ctx)
+ return handleCliResponseExtra(extra)
+}
+
+func runReloadTemplates(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ extra := private.ReloadTemplates(ctx)
+ return handleCliResponseExtra(extra)
+}
+
+func runFlushQueues(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ extra := private.FlushQueues(ctx, c.Duration("timeout"), c.Bool("non-blocking"))
+ return handleCliResponseExtra(extra)
+}
+
+func runProcesses(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ extra := private.Processes(ctx, os.Stdout, c.Bool("flat"), c.Bool("no-system"), c.Bool("stacktraces"), c.Bool("json"), c.String("cancel"))
+ return handleCliResponseExtra(extra)
+}
diff --git a/cmd/manager_logging.go b/cmd/manager_logging.go
new file mode 100644
index 0000000..6049b00
--- /dev/null
+++ b/cmd/manager_logging.go
@@ -0,0 +1,347 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "errors"
+ "fmt"
+ "os"
+
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/private"
+
+ "github.com/urfave/cli/v2"
+)
+
+var (
+ defaultLoggingFlags = []cli.Flag{
+ &cli.StringFlag{
+ Name: "logger",
+ Usage: `Logger name - will default to "default"`,
+ },
+ &cli.StringFlag{
+ Name: "writer",
+ Usage: "Name of the log writer - will default to mode",
+ },
+ &cli.StringFlag{
+ Name: "level",
+ Usage: "Logging level for the new logger",
+ },
+ &cli.StringFlag{
+ Name: "stacktrace-level",
+ Aliases: []string{"L"},
+ Usage: "Stacktrace logging level",
+ },
+ &cli.StringFlag{
+ Name: "flags",
+ Aliases: []string{"F"},
+ Usage: "Flags for the logger",
+ },
+ &cli.StringFlag{
+ Name: "expression",
+ Aliases: []string{"e"},
+ Usage: "Matching expression for the logger",
+ },
+ &cli.StringFlag{
+ Name: "prefix",
+ Aliases: []string{"p"},
+ Usage: "Prefix for the logger",
+ },
+ &cli.BoolFlag{
+ Name: "color",
+ Usage: "Use color in the logs",
+ },
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ }
+
+ subcmdLogging = &cli.Command{
+ Name: "logging",
+ Usage: "Adjust logging commands",
+ Subcommands: []*cli.Command{
+ {
+ Name: "pause",
+ Usage: "Pause logging (Forgejo will buffer logs up to a certain point and will drop them after that point)",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runPauseLogging,
+ }, {
+ Name: "resume",
+ Usage: "Resume logging",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runResumeLogging,
+ }, {
+ Name: "release-and-reopen",
+ Usage: "Cause Forgejo to release and re-open files used for logging",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+ Action: runReleaseReopenLogging,
+ }, {
+ Name: "remove",
+ Usage: "Remove a logger",
+ ArgsUsage: "[name] Name of logger to remove",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ }, &cli.StringFlag{
+ Name: "logger",
+ Usage: `Logger name - will default to "default"`,
+ },
+ },
+ Action: runRemoveLogger,
+ }, {
+ Name: "add",
+ Usage: "Add a logger",
+ Subcommands: []*cli.Command{
+ {
+ Name: "file",
+ Usage: "Add a file logger",
+ Flags: append(defaultLoggingFlags, []cli.Flag{
+ &cli.StringFlag{
+ Name: "filename",
+ Aliases: []string{"f"},
+ Usage: "Filename for the logger - this must be set.",
+ },
+ &cli.BoolFlag{
+ Name: "rotate",
+ Aliases: []string{"r"},
+ Usage: "Rotate logs",
+ Value: true,
+ },
+ &cli.Int64Flag{
+ Name: "max-size",
+ Aliases: []string{"s"},
+ Usage: "Maximum size in bytes before rotation",
+ },
+ &cli.BoolFlag{
+ Name: "daily",
+ Aliases: []string{"d"},
+ Usage: "Rotate logs daily",
+ Value: true,
+ },
+ &cli.IntFlag{
+ Name: "max-days",
+ Aliases: []string{"D"},
+ Usage: "Maximum number of daily logs to keep",
+ },
+ &cli.BoolFlag{
+ Name: "compress",
+ Aliases: []string{"z"},
+ Usage: "Compress rotated logs",
+ Value: true,
+ },
+ &cli.IntFlag{
+ Name: "compression-level",
+ Aliases: []string{"Z"},
+ Usage: "Compression level to use",
+ },
+ }...),
+ Action: runAddFileLogger,
+ }, {
+ Name: "conn",
+ Usage: "Add a net conn logger",
+ Flags: append(defaultLoggingFlags, []cli.Flag{
+ &cli.BoolFlag{
+ Name: "reconnect-on-message",
+ Aliases: []string{"R"},
+ Usage: "Reconnect to host for every message",
+ },
+ &cli.BoolFlag{
+ Name: "reconnect",
+ Aliases: []string{"r"},
+ Usage: "Reconnect to host when connection is dropped",
+ },
+ &cli.StringFlag{
+ Name: "protocol",
+ Aliases: []string{"P"},
+ Usage: "Set protocol to use: tcp, unix, or udp (defaults to tcp)",
+ },
+ &cli.StringFlag{
+ Name: "address",
+ Aliases: []string{"a"},
+ Usage: "Host address and port to connect to (defaults to :7020)",
+ },
+ }...),
+ Action: runAddConnLogger,
+ },
+ },
+ }, {
+ Name: "log-sql",
+ Usage: "Set LogSQL",
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ &cli.BoolFlag{
+ Name: "off",
+ Usage: "Switch off SQL logging",
+ },
+ },
+ Action: runSetLogSQL,
+ },
+ },
+ }
+)
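+// Illustrative examples (the file path is a placeholder):
+//
+//	forgejo manager logging add file --filename /var/log/forgejo/extra.log --max-days 7
+//	forgejo manager logging remove file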
+
+func runRemoveLogger(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ logger := c.String("logger")
+ if len(logger) == 0 {
+ logger = log.DEFAULT
+ }
+ writer := c.Args().First()
+
+ extra := private.RemoveLogger(ctx, logger, writer)
+ return handleCliResponseExtra(extra)
+}
+
+func runAddConnLogger(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ vals := map[string]any{}
+ mode := "conn"
+ vals["net"] = "tcp"
+ if c.IsSet("protocol") {
+ switch c.String("protocol") {
+ case "udp":
+ vals["net"] = "udp"
+ case "unix":
+ vals["net"] = "unix"
+ }
+ }
+ if c.IsSet("address") {
+ vals["address"] = c.String("address")
+ } else {
+ vals["address"] = ":7020"
+ }
+ if c.IsSet("reconnect") {
+ vals["reconnect"] = c.Bool("reconnect")
+ }
+ if c.IsSet("reconnect-on-message") {
+ vals["reconnectOnMsg"] = c.Bool("reconnect-on-message")
+ }
+ return commonAddLogger(c, mode, vals)
+}
+
+func runAddFileLogger(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ vals := map[string]any{}
+ mode := "file"
+ if c.IsSet("filename") {
+ vals["filename"] = c.String("filename")
+ } else {
+ return errors.New("filename must be set when creating a file logger")
+ }
+ if c.IsSet("rotate") {
+ vals["rotate"] = c.Bool("rotate")
+ }
+ if c.IsSet("max-size") {
+ vals["maxsize"] = c.Int64("max-size")
+ }
+ if c.IsSet("daily") {
+ vals["daily"] = c.Bool("daily")
+ }
+ if c.IsSet("max-days") {
+ vals["maxdays"] = c.Int("max-days")
+ }
+ if c.IsSet("compress") {
+ vals["compress"] = c.Bool("compress")
+ }
+ if c.IsSet("compression-level") {
+ vals["compressionLevel"] = c.Int("compression-level")
+ }
+ return commonAddLogger(c, mode, vals)
+}
+
+func commonAddLogger(c *cli.Context, mode string, vals map[string]any) error {
+ if len(c.String("level")) > 0 {
+ vals["level"] = log.LevelFromString(c.String("level")).String()
+ }
+ if len(c.String("stacktrace-level")) > 0 {
+ vals["stacktraceLevel"] = log.LevelFromString(c.String("stacktrace-level")).String()
+ }
+ if len(c.String("expression")) > 0 {
+ vals["expression"] = c.String("expression")
+ }
+ if len(c.String("prefix")) > 0 {
+ vals["prefix"] = c.String("prefix")
+ }
+ if len(c.String("flags")) > 0 {
+ vals["flags"] = log.FlagsFromString(c.String("flags"))
+ }
+ if c.IsSet("color") {
+ vals["colorize"] = c.Bool("color")
+ }
+ logger := log.DEFAULT
+ if c.IsSet("logger") {
+ logger = c.String("logger")
+ }
+ writer := mode
+ if c.IsSet("writer") {
+ writer = c.String("writer")
+ }
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ extra := private.AddLogger(ctx, logger, writer, mode, vals)
+ return handleCliResponseExtra(extra)
+}
+
+func runPauseLogging(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ userMsg := private.PauseLogging(ctx)
+ _, _ = fmt.Fprintln(os.Stdout, userMsg)
+ return nil
+}
+
+func runResumeLogging(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ userMsg := private.ResumeLogging(ctx)
+ _, _ = fmt.Fprintln(os.Stdout, userMsg)
+ return nil
+}
+
+func runReleaseReopenLogging(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setup(ctx, c.Bool("debug"))
+ userMsg := private.ReleaseReopenLogging(ctx)
+ _, _ = fmt.Fprintln(os.Stdout, userMsg)
+ return nil
+}
+
+func runSetLogSQL(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+ setup(ctx, c.Bool("debug"))
+
+ extra := private.SetLogSQL(ctx, !c.Bool("off"))
+ return handleCliResponseExtra(extra)
+}
diff --git a/cmd/migrate.go b/cmd/migrate.go
new file mode 100644
index 0000000..e81b862
--- /dev/null
+++ b/cmd/migrate.go
@@ -0,0 +1,45 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/migrations"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdMigrate represents the available migrate sub-command.
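+// Illustrative usage (user name, password and email are placeholders): run "forgejo migrate"
+// once against the configured database, then e.g.
+// "forgejo admin user create --admin --username root --password <secret> --email root@example.com"
+// before the first start of the server.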
+var CmdMigrate = &cli.Command{
+ Name: "migrate",
+ Usage: "Migrate the database",
+ Description: "This is a command for migrating the database, so that you can run gitea admin user create before starting the server.",
+ Action: runMigrate,
+}
+
+func runMigrate(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(stdCtx); err != nil {
+ return err
+ }
+
+ log.Info("AppPath: %s", setting.AppPath)
+ log.Info("AppWorkPath: %s", setting.AppWorkPath)
+ log.Info("Custom path: %s", setting.CustomPath)
+ log.Info("Log path: %s", setting.Log.RootPath)
+ log.Info("Configuration file: %s", setting.CustomConf)
+
+ if err := db.InitEngineWithMigration(context.Background(), migrations.Migrate); err != nil {
+ log.Fatal("Failed to initialize ORM engine: %v", err)
+ return err
+ }
+
+ return nil
+}
diff --git a/cmd/migrate_storage.go b/cmd/migrate_storage.go
new file mode 100644
index 0000000..3a69b55
--- /dev/null
+++ b/cmd/migrate_storage.go
@@ -0,0 +1,267 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "errors"
+ "fmt"
+ "io/fs"
+ "strings"
+
+ actions_model "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/migrations"
+ packages_model "code.gitea.io/gitea/models/packages"
+ repo_model "code.gitea.io/gitea/models/repo"
+ user_model "code.gitea.io/gitea/models/user"
+ "code.gitea.io/gitea/modules/log"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdMigrateStorage represents the available migrate storage sub-command.
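+// Illustrative example (endpoint, credentials and bucket are placeholders):
+//
+//	forgejo migrate-storage --type lfs --storage minio \
+//	  --minio-endpoint minio.example.com:9000 --minio-access-key-id KEY \
+//	  --minio-secret-access-key SECRET --minio-bucket forgejo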
+var CmdMigrateStorage = &cli.Command{
+ Name: "migrate-storage",
+ Usage: "Migrate the storage",
+ Description: "Copies stored files from storage configured in app.ini to parameter-configured storage",
+ Action: runMigrateStorage,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "type",
+ Aliases: []string{"t"},
+ Value: "",
+ Usage: "Type of stored files to copy. Allowed types: 'attachments', 'lfs', 'avatars', 'repo-avatars', 'repo-archivers', 'packages', 'actions-log', 'actions-artifacts'",
+ },
+ &cli.StringFlag{
+ Name: "storage",
+ Aliases: []string{"s"},
+ Value: "",
+ Usage: "New storage type: local (default) or minio",
+ },
+ &cli.StringFlag{
+ Name: "path",
+ Aliases: []string{"p"},
+ Value: "",
+ Usage: "New storage placement if store is local (leave blank for default)",
+ },
+ &cli.StringFlag{
+ Name: "minio-endpoint",
+ Value: "",
+ Usage: "Minio storage endpoint",
+ },
+ &cli.StringFlag{
+ Name: "minio-access-key-id",
+ Value: "",
+ Usage: "Minio storage accessKeyID",
+ },
+ &cli.StringFlag{
+ Name: "minio-secret-access-key",
+ Value: "",
+ Usage: "Minio storage secretAccessKey",
+ },
+ &cli.StringFlag{
+ Name: "minio-bucket",
+ Value: "",
+ Usage: "Minio storage bucket",
+ },
+ &cli.StringFlag{
+ Name: "minio-location",
+ Value: "",
+ Usage: "Minio storage location to create bucket",
+ },
+ &cli.StringFlag{
+ Name: "minio-base-path",
+ Value: "",
+ Usage: "Minio storage base path on the bucket",
+ },
+ &cli.BoolFlag{
+ Name: "minio-use-ssl",
+ Usage: "Enable SSL for minio",
+ },
+ &cli.BoolFlag{
+ Name: "minio-insecure-skip-verify",
+ Usage: "Skip SSL verification",
+ },
+ &cli.StringFlag{
+ Name: "minio-checksum-algorithm",
+ Value: "",
+ Usage: "Minio checksum algorithm (default/md5)",
+ },
+ },
+}
+
+func migrateAttachments(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, attach *repo_model.Attachment) error {
+ _, err := storage.Copy(dstStorage, attach.RelativePath(), storage.Attachments, attach.RelativePath())
+ return err
+ })
+}
+
+func migrateLFS(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, mo *git_model.LFSMetaObject) error {
+ _, err := storage.Copy(dstStorage, mo.RelativePath(), storage.LFS, mo.RelativePath())
+ return err
+ })
+}
+
+func migrateAvatars(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, user *user_model.User) error {
+ if user.CustomAvatarRelativePath() == "" {
+ return nil
+ }
+ _, err := storage.Copy(dstStorage, user.CustomAvatarRelativePath(), storage.Avatars, user.CustomAvatarRelativePath())
+ return err
+ })
+}
+
+func migrateRepoAvatars(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, repo *repo_model.Repository) error {
+ if repo.CustomAvatarRelativePath() == "" {
+ return nil
+ }
+ _, err := storage.Copy(dstStorage, repo.CustomAvatarRelativePath(), storage.RepoAvatars, repo.CustomAvatarRelativePath())
+ return err
+ })
+}
+
+func migrateRepoArchivers(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, archiver *repo_model.RepoArchiver) error {
+ p := archiver.RelativePath()
+ _, err := storage.Copy(dstStorage, p, storage.RepoArchives, p)
+ return err
+ })
+}
+
+func migratePackages(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, pb *packages_model.PackageBlob) error {
+ p := packages_module.KeyToRelativePath(packages_module.BlobHash256Key(pb.HashSHA256))
+ _, err := storage.Copy(dstStorage, p, storage.Packages, p)
+ return err
+ })
+}
+
+func migrateActionsLog(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, task *actions_model.ActionTask) error {
+ if task.LogExpired {
+ // the log has been cleared
+ return nil
+ }
+ if !task.LogInStorage {
+ // running tasks store logs in DBFS
+ return nil
+ }
+ p := task.LogFilename
+ _, err := storage.Copy(dstStorage, p, storage.Actions, p)
+ return err
+ })
+}
+
+func migrateActionsArtifacts(ctx context.Context, dstStorage storage.ObjectStorage) error {
+ return db.Iterate(ctx, nil, func(ctx context.Context, artifact *actions_model.ActionArtifact) error {
+ if artifact.Status == int64(actions_model.ArtifactStatusExpired) {
+ return nil
+ }
+
+ _, err := storage.Copy(dstStorage, artifact.StoragePath, storage.ActionsArtifacts, artifact.StoragePath)
+ if err != nil {
+ if errors.Is(err, fs.ErrNotExist) {
+ log.Warn("ignored: actions artifact %s exists in the database but not in storage", artifact.StoragePath)
+ return nil
+ }
+ return err
+ }
+
+ return nil
+ })
+}
+
+func runMigrateStorage(ctx *cli.Context) error {
+ stdCtx, cancel := installSignals()
+ defer cancel()
+
+ if err := initDB(stdCtx); err != nil {
+ return err
+ }
+
+ log.Info("AppPath: %s", setting.AppPath)
+ log.Info("AppWorkPath: %s", setting.AppWorkPath)
+ log.Info("Custom path: %s", setting.CustomPath)
+ log.Info("Log path: %s", setting.Log.RootPath)
+ log.Info("Configuration file: %s", setting.CustomConf)
+
+ if err := db.InitEngineWithMigration(context.Background(), migrations.Migrate); err != nil {
+ log.Fatal("Failed to initialize ORM engine: %v", err)
+ return err
+ }
+
+ if err := storage.Init(); err != nil {
+ return err
+ }
+
+ var dstStorage storage.ObjectStorage
+ var err error
+ switch strings.ToLower(ctx.String("storage")) {
+ case "":
+ fallthrough
+ case string(setting.LocalStorageType):
+ p := ctx.String("path")
+ if p == "" {
+ log.Fatal("Path must be given when storage is local")
+ return nil
+ }
+ dstStorage, err = storage.NewLocalStorage(
+ stdCtx,
+ &setting.Storage{
+ Path: p,
+ })
+ case string(setting.MinioStorageType):
+ dstStorage, err = storage.NewMinioStorage(
+ stdCtx,
+ &setting.Storage{
+ MinioConfig: setting.MinioStorageConfig{
+ Endpoint: ctx.String("minio-endpoint"),
+ AccessKeyID: ctx.String("minio-access-key-id"),
+ SecretAccessKey: ctx.String("minio-secret-access-key"),
+ Bucket: ctx.String("minio-bucket"),
+ Location: ctx.String("minio-location"),
+ BasePath: ctx.String("minio-base-path"),
+ UseSSL: ctx.Bool("minio-use-ssl"),
+ InsecureSkipVerify: ctx.Bool("minio-insecure-skip-verify"),
+ ChecksumAlgorithm: ctx.String("minio-checksum-algorithm"),
+ },
+ })
+ default:
+ return fmt.Errorf("unsupported storage type: %s", ctx.String("storage"))
+ }
+ if err != nil {
+ return err
+ }
+
+ migratedMethods := map[string]func(context.Context, storage.ObjectStorage) error{
+ "attachments": migrateAttachments,
+ "lfs": migrateLFS,
+ "avatars": migrateAvatars,
+ "repo-avatars": migrateRepoAvatars,
+ "repo-archivers": migrateRepoArchivers,
+ "packages": migratePackages,
+ "actions-log": migrateActionsLog,
+ "actions-artifacts": migrateActionsArtifacts,
+ }
+
+ tp := strings.ToLower(ctx.String("type"))
+ if m, ok := migratedMethods[tp]; ok {
+ if err := m(stdCtx, dstStorage); err != nil {
+ return err
+ }
+ log.Info("%s files have successfully been copied to the new storage.", tp)
+ return nil
+ }
+
+ return fmt.Errorf("unsupported storage: %s", ctx.String("type"))
+}
diff --git a/cmd/migrate_storage_test.go b/cmd/migrate_storage_test.go
new file mode 100644
index 0000000..800a15e
--- /dev/null
+++ b/cmd/migrate_storage_test.go
@@ -0,0 +1,134 @@
+// Copyright 2022 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "io"
+ "os"
+ "strings"
+ "testing"
+
+ "code.gitea.io/gitea/models/actions"
+ "code.gitea.io/gitea/models/db"
+ "code.gitea.io/gitea/models/packages"
+ "code.gitea.io/gitea/models/unittest"
+ user_model "code.gitea.io/gitea/models/user"
+ packages_module "code.gitea.io/gitea/modules/packages"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/modules/storage"
+ "code.gitea.io/gitea/modules/test"
+ packages_service "code.gitea.io/gitea/services/packages"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func createLocalStorage(t *testing.T) (storage.ObjectStorage, string) {
+ t.Helper()
+
+ p := t.TempDir()
+
+ storage, err := storage.NewLocalStorage(
+ context.Background(),
+ &setting.Storage{
+ Path: p,
+ })
+ require.NoError(t, err)
+
+ return storage, p
+}
+
+func TestMigratePackages(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ creator := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ content := "package main\n\nfunc main() {\nfmt.Println(\"hi\")\n}\n"
+ buf, err := packages_module.CreateHashedBufferFromReaderWithSize(strings.NewReader(content), 1024)
+ require.NoError(t, err)
+ defer buf.Close()
+
+ v, f, err := packages_service.CreatePackageAndAddFile(db.DefaultContext, &packages_service.PackageCreationInfo{
+ PackageInfo: packages_service.PackageInfo{
+ Owner: creator,
+ PackageType: packages.TypeGeneric,
+ Name: "test",
+ Version: "1.0.0",
+ },
+ Creator: creator,
+ SemverCompatible: true,
+ VersionProperties: map[string]string{},
+ }, &packages_service.PackageFileCreationInfo{
+ PackageFileInfo: packages_service.PackageFileInfo{
+ Filename: "a.go",
+ },
+ Creator: creator,
+ Data: buf,
+ IsLead: true,
+ })
+ require.NoError(t, err)
+ assert.NotNil(t, v)
+ assert.NotNil(t, f)
+
+ ctx := context.Background()
+
+ dstStorage, p := createLocalStorage(t)
+
+ err = migratePackages(ctx, dstStorage)
+ require.NoError(t, err)
+
+ entries, err := os.ReadDir(p)
+ require.NoError(t, err)
+ assert.Len(t, entries, 2)
+ assert.EqualValues(t, "01", entries[0].Name())
+ assert.EqualValues(t, "tmp", entries[1].Name())
+}
+
+func TestMigrateActionsArtifacts(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ srcStorage, _ := createLocalStorage(t)
+ defer test.MockVariableValue(&storage.ActionsArtifacts, srcStorage)()
+ id := int64(0)
+
+ addArtifact := func(storagePath string, status actions.ArtifactStatus) {
+ id++
+ artifact := &actions.ActionArtifact{
+ ID: id,
+ ArtifactName: storagePath,
+ StoragePath: storagePath,
+ Status: int64(status),
+ }
+ _, err := db.GetEngine(db.DefaultContext).Insert(artifact)
+ require.NoError(t, err)
+ srcStorage.Save(storagePath, strings.NewReader(storagePath), -1)
+ }
+
+ exists := "/exists"
+ addArtifact(exists, actions.ArtifactStatusUploadConfirmed)
+
+ expired := "/expired"
+ addArtifact(expired, actions.ArtifactStatusExpired)
+
+ notFound := "/notfound"
+ addArtifact(notFound, actions.ArtifactStatusUploadConfirmed)
+ srcStorage.Delete(notFound)
+
+ dstStorage, _ := createLocalStorage(t)
+
+ require.NoError(t, migrateActionsArtifacts(db.DefaultContext, dstStorage))
+
+ object, err := dstStorage.Open(exists)
+ require.NoError(t, err)
+ buf, err := io.ReadAll(object)
+ require.NoError(t, err)
+ assert.Equal(t, exists, string(buf))
+
+ _, err = dstStorage.Stat(expired)
+ require.Error(t, err)
+
+ _, err = dstStorage.Stat(notFound)
+ require.Error(t, err)
+}
diff --git a/cmd/restore_repo.go b/cmd/restore_repo.go
new file mode 100644
index 0000000..37b32aa
--- /dev/null
+++ b/cmd/restore_repo.go
@@ -0,0 +1,69 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "strings"
+
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/urfave/cli/v2"
+)
+
+// CmdRestoreRepository represents the available restore-repo sub-command.
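+// Illustrative example (owner and repository names are placeholders):
+//
+//	forgejo restore-repo --repo_dir ./data --owner_name someuser --repo_name somerepo --units wiki,issues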
+var CmdRestoreRepository = &cli.Command{
+ Name: "restore-repo",
+ Usage: "Restore the repository from disk",
+ Description: "This is a command for restoring the repository data.",
+ Action: runRestoreRepository,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "repo_dir",
+ Aliases: []string{"r"},
+ Value: "./data",
+ Usage: "Repository dir path to restore from",
+ },
+ &cli.StringFlag{
+ Name: "owner_name",
+ Value: "",
+ Usage: "Restore destination owner name",
+ },
+ &cli.StringFlag{
+ Name: "repo_name",
+ Value: "",
+ Usage: "Restore destination repository name",
+ },
+ &cli.StringFlag{
+ Name: "units",
+ Value: "",
+ Usage: `Which items will be restored; specify one or more units separated by commas.
+wiki, issues, labels, releases, release_assets, milestones, pull_requests, comments are allowed. Empty means all units.`,
+ },
+ &cli.BoolFlag{
+ Name: "validation",
+ Usage: "Sanity check the content of the files before trying to load them",
+ },
+ },
+}
+
+func runRestoreRepository(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ setting.MustInstalled()
+ var units []string
+ if s := c.String("units"); s != "" {
+ units = strings.Split(s, ",")
+ }
+ extra := private.RestoreRepo(
+ ctx,
+ c.String("repo_dir"),
+ c.String("owner_name"),
+ c.String("repo_name"),
+ units,
+ c.Bool("validation"),
+ )
+ return handleCliResponseExtra(extra)
+}
diff --git a/cmd/serv.go b/cmd/serv.go
new file mode 100644
index 0000000..db67e36
--- /dev/null
+++ b/cmd/serv.go
@@ -0,0 +1,358 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "net/url"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strconv"
+ "strings"
+ "testing"
+ "time"
+ "unicode"
+
+ asymkey_model "code.gitea.io/gitea/models/asymkey"
+ git_model "code.gitea.io/gitea/models/git"
+ "code.gitea.io/gitea/models/perm"
+ "code.gitea.io/gitea/modules/git"
+ "code.gitea.io/gitea/modules/json"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/pprof"
+ "code.gitea.io/gitea/modules/private"
+ "code.gitea.io/gitea/modules/process"
+ repo_module "code.gitea.io/gitea/modules/repository"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/services/lfs"
+
+ "github.com/golang-jwt/jwt/v5"
+ "github.com/kballard/go-shellquote"
+ "github.com/urfave/cli/v2"
+)
+
+const (
+ lfsAuthenticateVerb = "git-lfs-authenticate"
+)
+
+// CmdServ represents the available serv sub-command.
+var CmdServ = &cli.Command{
+ Name: "serv",
+ Usage: "(internal) Should only be called by SSH shell",
+ Description: "Serv provides access auth for repositories",
+ Before: PrepareConsoleLoggerLevel(log.FATAL),
+ Action: runServ,
+ Flags: []cli.Flag{
+ &cli.BoolFlag{
+ Name: "enable-pprof",
+ },
+ &cli.BoolFlag{
+ Name: "debug",
+ },
+ },
+}
+
+func setup(ctx context.Context, debug bool) {
+ if debug {
+ setupConsoleLogger(log.TRACE, false, os.Stderr)
+ } else {
+ setupConsoleLogger(log.FATAL, false, os.Stderr)
+ }
+ setting.MustInstalled()
+ if _, err := os.Stat(setting.RepoRootPath); err != nil {
+ _ = fail(ctx, "Unable to access repository path", "Unable to access repository path %q, err: %v", setting.RepoRootPath, err)
+ return
+ }
+ if err := git.InitSimple(context.Background()); err != nil {
+ _ = fail(ctx, "Failed to init git", "Failed to init git, err: %v", err)
+ }
+}
+
+var (
+ allowedCommands = map[string]perm.AccessMode{
+ "git-upload-pack": perm.AccessModeRead,
+ "git-upload-archive": perm.AccessModeRead,
+ "git-receive-pack": perm.AccessModeWrite,
+ lfsAuthenticateVerb: perm.AccessModeNone,
+ }
+ alphaDashDotPattern = regexp.MustCompile(`[^\w-\.]`)
+)
+
+// fail prints a message to stdout; it's mainly used by the git serv and git hook commands.
+// The output will be passed to the git client and shown to the user.
+func fail(ctx context.Context, userMessage, logMsgFmt string, args ...any) error {
+ if userMessage == "" {
+ userMessage = "Internal Server Error (no specific error)"
+ }
+
+ // There appears to be a chance of causing a zombie process and failing to read the
+ // exit status if nothing is output on stdout.
+ _, _ = fmt.Fprintln(os.Stdout, "")
+ _, _ = fmt.Fprintln(os.Stderr, "Forgejo:", userMessage)
+
+ if logMsgFmt != "" {
+ logMsg := fmt.Sprintf(logMsgFmt, args...)
+ if !setting.IsProd {
+ _, _ = fmt.Fprintln(os.Stderr, "Forgejo:", logMsg)
+ }
+ if userMessage != "" {
+ if unicode.IsPunct(rune(userMessage[len(userMessage)-1])) {
+ logMsg = userMessage + " " + logMsg
+ } else {
+ logMsg = userMessage + ". " + logMsg
+ }
+ }
+ // Don't send a log if this is done in a test and no InternalToken is set.
+ if !testing.Testing() || setting.InternalToken != "" {
+ _ = private.SSHLog(ctx, true, logMsg)
+ }
+ }
+ return cli.Exit("", 1)
+}
+
+// handleCliResponseExtra handles the extra response from the cli sub-commands
+// If there is a user message it will be printed to stdout
+// If the command failed it will return an error (the error will be printed by cli framework)
+func handleCliResponseExtra(extra private.ResponseExtra) error {
+ if extra.UserMsg != "" {
+ _, _ = fmt.Fprintln(os.Stdout, extra.UserMsg)
+ }
+ if extra.HasError() {
+ return cli.Exit(extra.Error, 1)
+ }
+ return nil
+}
+
+func runServ(c *cli.Context) error {
+ ctx, cancel := installSignals()
+ defer cancel()
+
+ // FIXME: This needs to be internationalised
+ setup(ctx, c.Bool("debug"))
+
+ if setting.SSH.Disabled {
+ fmt.Println("Forgejo: SSH has been disabled")
+ return nil
+ }
+
+ if c.NArg() < 1 {
+ if err := cli.ShowSubcommandHelp(c); err != nil {
+ fmt.Printf("error showing subcommand help: %v\n", err)
+ }
+ return nil
+ }
+
+ defer func() {
+ if err := recover(); err != nil {
+ _ = fail(ctx, "Internal Server Error", "Panic: %v\n%s", err, log.Stack(2))
+ }
+ }()
+
+ keys := strings.Split(c.Args().First(), "-")
+ if len(keys) != 2 || keys[0] != "key" {
+ return fail(ctx, "Key ID format error", "Invalid key argument: %s", c.Args().First())
+ }
+ keyID, err := strconv.ParseInt(keys[1], 10, 64)
+ if err != nil {
+ return fail(ctx, "Key ID parsing error", "Invalid key argument: %s", c.Args().Get(1))
+ }
+
+ cmd := os.Getenv("SSH_ORIGINAL_COMMAND")
+ if len(cmd) == 0 {
+ key, user, err := private.ServNoCommand(ctx, keyID)
+ if err != nil {
+ return fail(ctx, "Key check failed", "Failed to check provided key: %v", err)
+ }
+ switch key.Type {
+ case asymkey_model.KeyTypeDeploy:
+ fmt.Println("Hi there! You've successfully authenticated with the deploy key named " + key.Name + ", but Forgejo does not provide shell access.")
+ case asymkey_model.KeyTypePrincipal:
+ fmt.Println("Hi there! You've successfully authenticated with the principal " + key.Content + ", but Forgejo does not provide shell access.")
+ default:
+ fmt.Println("Hi there, " + user.Name + "! You've successfully authenticated with the key named " + key.Name + ", but Forgejo does not provide shell access.")
+ }
+ fmt.Println("If this is unexpected, please log in with password and setup Forgejo under another user.")
+ return nil
+ } else if c.Bool("debug") {
+ log.Debug("SSH_ORIGINAL_COMMAND: %s", os.Getenv("SSH_ORIGINAL_COMMAND"))
+ }
+
+ words, err := shellquote.Split(cmd)
+ if err != nil {
+ return fail(ctx, "Error parsing arguments", "Failed to parse arguments: %v", err)
+ }
+
+ if len(words) < 2 {
+ if git.CheckGitVersionAtLeast("2.29") == nil {
+ // for AGit Flow
+ if cmd == "ssh_info" {
+ fmt.Print(`{"type":"gitea","version":1}`)
+ return nil
+ }
+ }
+ return fail(ctx, "Too few arguments", "Too few arguments in cmd: %s", cmd)
+ }
+
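+ // For example, "git-receive-pack '/owner/repo.git'" splits into the verb "git-receive-pack" and the
+ // repository path "owner/repo.git" (after trimming the leading slash); for
+ // "git-lfs-authenticate owner/repo.git download" the third word is the LFS operation.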
+ verb := words[0]
+ repoPath := strings.TrimPrefix(words[1], "/")
+
+ var lfsVerb string
+ if verb == lfsAuthenticateVerb {
+ if !setting.LFS.StartServer {
+ return fail(ctx, "Unknown git command", "LFS authentication request over SSH denied, LFS support is disabled")
+ }
+
+ if len(words) > 2 {
+ lfsVerb = words[2]
+ }
+ }
+
+ rr := strings.SplitN(repoPath, "/", 2)
+ if len(rr) != 2 {
+ return fail(ctx, "Invalid repository path", "Invalid repository path: %v", repoPath)
+ }
+
+ username := rr[0]
+ reponame := strings.TrimSuffix(rr[1], ".git")
+
+ // Lowercase and trim the repoPath, as that is how it is stored.
+ // This should be done after splitting the repoPath into username and reponame
+ // so that username and reponame are not affected.
+ repoPath = strings.ToLower(strings.TrimSpace(repoPath))
+
+ if alphaDashDotPattern.MatchString(reponame) {
+ return fail(ctx, "Invalid repo name", "Invalid repo name: %s", reponame)
+ }
+
+ if c.Bool("enable-pprof") {
+ if err := os.MkdirAll(setting.PprofDataPath, os.ModePerm); err != nil {
+ return fail(ctx, "Error while trying to create PPROF_DATA_PATH", "Error while trying to create PPROF_DATA_PATH: %v", err)
+ }
+
+ stopCPUProfiler, err := pprof.DumpCPUProfileForUsername(setting.PprofDataPath, username)
+ if err != nil {
+ return fail(ctx, "Unable to start CPU profiler", "Unable to start CPU profile: %v", err)
+ }
+ defer func() {
+ stopCPUProfiler()
+ err := pprof.DumpMemProfileForUsername(setting.PprofDataPath, username)
+ if err != nil {
+ _ = fail(ctx, "Unable to dump Mem profile", "Unable to dump Mem Profile: %v", err)
+ }
+ }()
+ }
+
+ requestedMode, has := allowedCommands[verb]
+ if !has {
+ return fail(ctx, "Unknown git command", "Unknown git command %s", verb)
+ }
+
+ if verb == lfsAuthenticateVerb {
+ if lfsVerb == "upload" {
+ requestedMode = perm.AccessModeWrite
+ } else if lfsVerb == "download" {
+ requestedMode = perm.AccessModeRead
+ } else {
+ return fail(ctx, "Unknown LFS verb", "Unknown lfs verb %s", lfsVerb)
+ }
+ }
+
+ results, extra := private.ServCommand(ctx, keyID, username, reponame, requestedMode, verb, lfsVerb)
+ if extra.HasError() {
+ return fail(ctx, extra.UserMsg, "ServCommand failed: %s", extra.Error)
+ }
+
+ // LFS token authentication
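+ // A short-lived JWT is issued and written to stdout as JSON, roughly of the form
+ // {"header": {"Authorization": "Bearer <token>"}, "href": "<AppURL>owner/repo.git/info/lfs"},
+ // which is what the git-lfs client expects from the "git-lfs-authenticate" SSH command.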
+ if verb == lfsAuthenticateVerb {
+ url := fmt.Sprintf("%s%s/%s.git/info/lfs", setting.AppURL, url.PathEscape(results.OwnerName), url.PathEscape(results.RepoName))
+
+ now := time.Now()
+ claims := lfs.Claims{
+ RegisteredClaims: jwt.RegisteredClaims{
+ ExpiresAt: jwt.NewNumericDate(now.Add(setting.LFS.HTTPAuthExpiry)),
+ NotBefore: jwt.NewNumericDate(now),
+ },
+ RepoID: results.RepoID,
+ Op: lfsVerb,
+ UserID: results.UserID,
+ }
+ token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
+
+ // Sign and get the complete encoded token as a string using the secret
+ tokenString, err := token.SignedString(setting.LFS.JWTSecretBytes)
+ if err != nil {
+ return fail(ctx, "Failed to sign JWT Token", "Failed to sign JWT token: %v", err)
+ }
+
+ tokenAuthentication := &git_model.LFSTokenResponse{
+ Header: make(map[string]string),
+ Href: url,
+ }
+ tokenAuthentication.Header["Authorization"] = fmt.Sprintf("Bearer %s", tokenString)
+
+ enc := json.NewEncoder(os.Stdout)
+ err = enc.Encode(tokenAuthentication)
+ if err != nil {
+ return fail(ctx, "Failed to encode LFS json response", "Failed to encode LFS json response: %v", err)
+ }
+ return nil
+ }
+
+ var gitcmd *exec.Cmd
+ gitBinPath := filepath.Dir(git.GitExecutable) // e.g. /usr/bin
+ gitBinVerb := filepath.Join(gitBinPath, verb) // e.g. /usr/bin/git-upload-pack
+ if _, err := os.Stat(gitBinVerb); err != nil {
+ // if the command "git-upload-pack" doesn't exist, try to split "git-upload-pack" to use the sub-command with git
+ // ps: Windows only has "git.exe" in the bin path, so Windows always uses this way
+ verbFields := strings.SplitN(verb, "-", 2)
+ if len(verbFields) == 2 {
+ // use git binary with the sub-command part: "C:\...\bin\git.exe", "upload-pack", ...
+ gitcmd = exec.CommandContext(ctx, git.GitExecutable, verbFields[1], repoPath)
+ }
+ }
+ if gitcmd == nil {
+ // by default, use the verb (it has been checked above by allowedCommands)
+ gitcmd = exec.CommandContext(ctx, gitBinVerb, repoPath)
+ }
+
+ process.SetSysProcAttribute(gitcmd)
+ gitcmd.Dir = setting.RepoRootPath
+ gitcmd.Stdout = os.Stdout
+ gitcmd.Stdin = os.Stdin
+ gitcmd.Stderr = os.Stderr
+ gitcmd.Env = append(gitcmd.Env, os.Environ()...)
+ gitcmd.Env = append(gitcmd.Env,
+ repo_module.EnvRepoIsWiki+"="+strconv.FormatBool(results.IsWiki),
+ repo_module.EnvRepoName+"="+results.RepoName,
+ repo_module.EnvRepoUsername+"="+results.OwnerName,
+ repo_module.EnvPusherName+"="+results.UserName,
+ repo_module.EnvPusherEmail+"="+results.UserEmail,
+ repo_module.EnvPusherID+"="+strconv.FormatInt(results.UserID, 10),
+ repo_module.EnvRepoID+"="+strconv.FormatInt(results.RepoID, 10),
+ repo_module.EnvPRID+"="+fmt.Sprintf("%d", 0),
+ repo_module.EnvDeployKeyID+"="+fmt.Sprintf("%d", results.DeployKeyID),
+ repo_module.EnvKeyID+"="+fmt.Sprintf("%d", results.KeyID),
+ repo_module.EnvAppURL+"="+setting.AppURL,
+ )
+ // To avoid breaking compatibility, only the minimal set of environment variables is used for the "serv" command here.
+ // Whether to use the same git.CommonGitCmdEnvs() as the "git" command could be reconsidered later.
+ gitcmd.Env = append(gitcmd.Env, git.CommonCmdServEnvs()...)
+
+ if err = gitcmd.Run(); err != nil {
+ return fail(ctx, "Failed to execute git command", "Failed to execute git command: %v", err)
+ }
+
+ // Update user key activity.
+ if results.KeyID > 0 {
+ if err = private.UpdatePublicKeyInRepo(ctx, results.KeyID, results.RepoID); err != nil {
+ return fail(ctx, "Failed to update public key", "UpdatePublicKeyInRepo: %v", err)
+ }
+ }
+
+ return nil
+}
diff --git a/cmd/web.go b/cmd/web.go
new file mode 100644
index 0000000..44babd5
--- /dev/null
+++ b/cmd/web.go
@@ -0,0 +1,357 @@
+// Copyright 2014 The Gogs Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "context"
+ "fmt"
+ "net"
+ "net/http"
+ "os"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ _ "net/http/pprof" // Used for debugging if enabled and a web server is running
+
+ "code.gitea.io/gitea/modules/container"
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/public"
+ "code.gitea.io/gitea/modules/setting"
+ "code.gitea.io/gitea/routers"
+ "code.gitea.io/gitea/routers/install"
+
+ "github.com/felixge/fgprof"
+ "github.com/urfave/cli/v2"
+)
+
+// PIDFile could be set from build tag
+var PIDFile = "/run/gitea.pid"
+
+// CmdWeb represents the available web sub-command.
+var CmdWeb = &cli.Command{
+ Name: "web",
+ Usage: "Start the Forgejo web server",
+ Description: `The Forgejo web server is the only thing you need to run,
+and it takes care of all the other things for you`,
+ Before: PrepareConsoleLoggerLevel(log.INFO),
+ Action: runWeb,
+ Flags: []cli.Flag{
+ &cli.StringFlag{
+ Name: "port",
+ Aliases: []string{"p"},
+ Value: "3000",
+ Usage: "Temporary port number to prevent conflict",
+ },
+ &cli.StringFlag{
+ Name: "install-port",
+ Value: "3000",
+ Usage: "Temporary port number to run the install page on to prevent conflict",
+ },
+ &cli.StringFlag{
+ Name: "pid",
+ Aliases: []string{"P"},
+ Value: PIDFile,
+ Usage: "Custom pid file path",
+ },
+ &cli.BoolFlag{
+ Name: "quiet",
+ Aliases: []string{"q"},
+ Usage: "Only display Fatal logging errors until logging is set-up",
+ },
+ &cli.BoolFlag{
+ Name: "verbose",
+ Usage: "Set initial logging to TRACE level until logging is properly set-up",
+ },
+ },
+}
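+
+// Example invocations (flag names as defined above; binary name and paths are illustrative):
+//
+//   forgejo web                             (use the ports configured in app.ini)
+//   forgejo web --port 3001                 (temporarily listen on another port)
+//   forgejo web --pid /run/forgejo/web.pid  (write the PID file to a custom path)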
+
+func runHTTPRedirector() {
+ _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: HTTP Redirector", process.SystemProcessType, true)
+ defer finished()
+
+ source := fmt.Sprintf("%s:%s", setting.HTTPAddr, setting.PortToRedirect)
+ dest := strings.TrimSuffix(setting.AppURL, "/")
+ log.Info("Redirecting: %s to %s", source, dest)
+
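+ // For example, with AppURL "https://git.example.com/" (illustrative), a request for
+ // "http://git.example.com/owner/repo?tab=issues" receives a 307 redirect to
+ // "https://git.example.com/owner/repo?tab=issues".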
+ handler := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ target := dest + r.URL.Path
+ if len(r.URL.RawQuery) > 0 {
+ target += "?" + r.URL.RawQuery
+ }
+ http.Redirect(w, r, target, http.StatusTemporaryRedirect)
+ })
+
+ err := runHTTP("tcp", source, "HTTP Redirector", handler, setting.RedirectorUseProxyProtocol)
+ if err != nil {
+ log.Fatal("Failed to start port redirection: %v", err)
+ }
+}
+
+func createPIDFile(pidPath string) {
+ currentPid := os.Getpid()
+ if err := os.MkdirAll(filepath.Dir(pidPath), os.ModePerm); err != nil {
+ log.Fatal("Failed to create PID folder: %v", err)
+ }
+
+ file, err := os.Create(pidPath)
+ if err != nil {
+ log.Fatal("Failed to create PID file: %v", err)
+ }
+ defer file.Close()
+ if _, err := file.WriteString(strconv.FormatInt(int64(currentPid), 10)); err != nil {
+ log.Fatal("Failed to write PID information: %v", err)
+ }
+}
+
+func showWebStartupMessage(msg string) {
+ log.Info("Forgejo version: %s%s", setting.AppVer, setting.AppBuiltWith)
+ log.Info("* RunMode: %s", setting.RunMode)
+ log.Info("* AppPath: %s", setting.AppPath)
+ log.Info("* WorkPath: %s", setting.AppWorkPath)
+ log.Info("* CustomPath: %s", setting.CustomPath)
+ log.Info("* ConfigFile: %s", setting.CustomConf)
+ log.Info("%s", msg) // show startup message
+}
+
+func serveInstall(ctx *cli.Context) error {
+ showWebStartupMessage("Prepare to run install page")
+
+ routers.InitWebInstallPage(graceful.GetManager().HammerContext())
+
+ // Flag for port number, in case of a port conflict on first run
+ if ctx.IsSet("port") {
+ if err := setPort(ctx.String("port")); err != nil {
+ return err
+ }
+ }
+ if ctx.IsSet("install-port") {
+ if err := setPort(ctx.String("install-port")); err != nil {
+ return err
+ }
+ }
+ c := install.Routes()
+ err := listen(c, false)
+ if err != nil {
+ log.Critical("Unable to open listener for installer. Is Forgejo already running?")
+ graceful.GetManager().DoGracefulShutdown()
+ }
+ select {
+ case <-graceful.GetManager().IsShutdown():
+ <-graceful.GetManager().Done()
+ log.Info("PID: %d Forgejo Web Finished", os.Getpid())
+ log.GetManager().Close()
+ return err
+ default:
+ }
+ return nil
+}
+
+func serveInstalled(ctx *cli.Context) error {
+ setting.InitCfgProvider(setting.CustomConf)
+ setting.LoadCommonSettings()
+ setting.MustInstalled()
+
+ showWebStartupMessage("Prepare to run web server")
+
+ if setting.AppWorkPathMismatch {
+ log.Error("WORK_PATH from config %q doesn't match other paths from environment variables or command arguments. "+
+ "Only WORK_PATH in config should be set and used. Please make sure the path in config file is correct, "+
+ "remove the other outdated work paths from environment variables and command arguments", setting.CustomConf)
+ }
+
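+ // If WORK_PATH is not yet recorded in the config, persist the resolved value so that future runs
+ // use the same path. The result is a top-level app.ini entry, e.g. (path illustrative):
+ //   WORK_PATH = /var/lib/forgejo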
+ rootCfg := setting.CfgProvider
+ if rootCfg.Section("").Key("WORK_PATH").String() == "" {
+ saveCfg, err := rootCfg.PrepareSaving()
+ if err != nil {
+ log.Error("Unable to prepare saving WORK_PATH=%s to config %q: %v\nYou should set it manually, otherwise there might be bugs when accessing the git repositories.", setting.AppWorkPath, setting.CustomConf, err)
+ } else {
+ rootCfg.Section("").Key("WORK_PATH").SetValue(setting.AppWorkPath)
+ saveCfg.Section("").Key("WORK_PATH").SetValue(setting.AppWorkPath)
+ if err = saveCfg.Save(); err != nil {
+ log.Error("Unable to update WORK_PATH=%s to config %q: %v\nYou should set it manually, otherwise there might be bugs when accessing the git repositories.", setting.AppWorkPath, setting.CustomConf, err)
+ }
+ }
+ }
+
+ // In old versions, a user's custom web files were placed in "custom/public" and served as "http://domain.com/assets/xxx".
+ // Now Gitea only serves pre-defined files in the "custom/public" folder based on the web root; users should move their custom files to "custom/public/assets".
+ publicFiles, _ := public.AssetFS().ListFiles(".")
+ publicFilesSet := container.SetOf(publicFiles...)
+ publicFilesSet.Remove(".well-known")
+ publicFilesSet.Remove("assets")
+ publicFilesSet.Remove("robots.txt")
+ for _, fn := range publicFilesSet.Values() {
+ log.Error("Found legacy public asset %q in CustomPath. Please move it to %s/public/assets/%s", fn, setting.CustomPath, fn)
+ }
+ if _, err := os.Stat(filepath.Join(setting.CustomPath, "robots.txt")); err == nil {
+ log.Error(`Found legacy public asset "robots.txt" in CustomPath. Please move it to %s/public/robots.txt`, setting.CustomPath)
+ }
+
+ routers.InitWebInstalled(graceful.GetManager().HammerContext())
+
+ // We check that AppDataPath exists here (it should have been created during installation)
+ // We can't check it in `InitWebInstalled`, because some integration tests
+ // use cmd -> InitWebInstalled, but the AppDataPath doesn't exist during those tests.
+ if _, err := os.Stat(setting.AppDataPath); err != nil {
+ log.Fatal("Can not find APP_DATA_PATH %q", setting.AppDataPath)
+ }
+
+ // Override the provided port number within the configuration
+ if ctx.IsSet("port") {
+ if err := setPort(ctx.String("port")); err != nil {
+ return err
+ }
+ }
+
+ // Set up Chi routes
+ webRoutes := routers.NormalRoutes()
+ err := listen(webRoutes, true)
+ <-graceful.GetManager().Done()
+ log.Info("PID: %d Forgejo Web Finished", os.Getpid())
+ log.GetManager().Close()
+ return err
+}
+
+func servePprof() {
+ http.DefaultServeMux.Handle("/debug/fgprof", fgprof.Handler())
+ _, _, finished := process.GetManager().AddTypedContext(context.Background(), "Web: PProf Server", process.SystemProcessType, true)
+ // The pprof server is for debugging purposes only and shouldn't be exposed on a public network. At the moment it's not worth introducing a configurable option for it.
+ log.Info("Starting pprof server on localhost:6060")
+ log.Info("Stopped pprof server: %v", http.ListenAndServe("localhost:6060", nil))
+ finished()
+}
+
+func runWeb(ctx *cli.Context) error {
+ defer func() {
+ if panicked := recover(); panicked != nil {
+ log.Fatal("PANIC: %v\n%s", panicked, log.Stack(2))
+ }
+ }()
+
+ managerCtx, cancel := context.WithCancel(context.Background())
+ graceful.InitManager(managerCtx)
+ defer cancel()
+
+ if os.Getppid() > 1 && len(os.Getenv("LISTEN_FDS")) > 0 {
+ log.Info("Restarting Forgejo on PID: %d from parent PID: %d", os.Getpid(), os.Getppid())
+ } else {
+ log.Info("Starting Forgejo on PID: %d", os.Getpid())
+ }
+
+ // Set pid file setting
+ if ctx.IsSet("pid") {
+ createPIDFile(ctx.String("pid"))
+ }
+
+ if !setting.InstallLock {
+ if err := serveInstall(ctx); err != nil {
+ return err
+ }
+ } else {
+ NoInstallListener()
+ }
+
+ if setting.EnablePprof {
+ go servePprof()
+ }
+
+ return serveInstalled(ctx)
+}
+
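+// setPort replaces the configured HTTP port in AppURL and HTTPPort with the given temporary
+// port and, except when serving over a unix socket or FCGI, persists a matching LOCAL_ROOT_URL.
+// For example (values illustrative), with protocol "http", HTTPAddr "0.0.0.0" and port "3001",
+// LOCAL_ROOT_URL becomes "http://localhost:3001/".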
+func setPort(port string) error {
+ setting.AppURL = strings.Replace(setting.AppURL, setting.HTTPPort, port, 1)
+ setting.HTTPPort = port
+
+ switch setting.Protocol {
+ case setting.HTTPUnix:
+ case setting.FCGI:
+ case setting.FCGIUnix:
+ default:
+ defaultLocalURL := string(setting.Protocol) + "://"
+ if setting.HTTPAddr == "0.0.0.0" {
+ defaultLocalURL += "localhost"
+ } else {
+ defaultLocalURL += setting.HTTPAddr
+ }
+ defaultLocalURL += ":" + setting.HTTPPort + "/"
+
+ // Save LOCAL_ROOT_URL if port changed
+ rootCfg := setting.CfgProvider
+ saveCfg, err := rootCfg.PrepareSaving()
+ if err != nil {
+ return fmt.Errorf("failed to save config file: %v", err)
+ }
+ rootCfg.Section("server").Key("LOCAL_ROOT_URL").SetValue(defaultLocalURL)
+ saveCfg.Section("server").Key("LOCAL_ROOT_URL").SetValue(defaultLocalURL)
+ if err = saveCfg.Save(); err != nil {
+ return fmt.Errorf("failed to save config file: %v", err)
+ }
+ }
+ return nil
+}
+
+func listen(m http.Handler, handleRedirector bool) error {
+ listenAddr := setting.HTTPAddr
+ if setting.Protocol != setting.HTTPUnix && setting.Protocol != setting.FCGIUnix {
+ listenAddr = net.JoinHostPort(listenAddr, setting.HTTPPort)
+ }
+ _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: Forgejo Server", process.SystemProcessType, true)
+ defer finished()
+ log.Info("Listen: %v://%s%s", setting.Protocol, listenAddr, setting.AppSubURL)
+ // This can be useful: many users misconfigure this value and get strange behavior behind a reverse proxy.
+ // A user may spot and fix the configuration mistake when they see this log line,
+ // and it is also very helpful to maintainers when helping users resolve their configuration problems.
+ log.Info("AppURL(ROOT_URL): %s", setting.AppURL)
+
+ if setting.LFS.StartServer {
+ log.Info("LFS server enabled")
+ }
+
+ var err error
+ switch setting.Protocol {
+ case setting.HTTP:
+ if handleRedirector {
+ NoHTTPRedirector()
+ }
+ err = runHTTP("tcp", listenAddr, "Web", m, setting.UseProxyProtocol)
+ case setting.HTTPS:
+ if setting.EnableAcme {
+ err = runACME(listenAddr, m)
+ break
+ }
+ if handleRedirector {
+ if setting.RedirectOtherPort {
+ go runHTTPRedirector()
+ } else {
+ NoHTTPRedirector()
+ }
+ }
+ err = runHTTPS("tcp", listenAddr, "Web", setting.CertFile, setting.KeyFile, m, setting.UseProxyProtocol, setting.ProxyProtocolTLSBridging)
+ case setting.FCGI:
+ if handleRedirector {
+ NoHTTPRedirector()
+ }
+ err = runFCGI("tcp", listenAddr, "FCGI Web", m, setting.UseProxyProtocol)
+ case setting.HTTPUnix:
+ if handleRedirector {
+ NoHTTPRedirector()
+ }
+ err = runHTTP("unix", listenAddr, "Web", m, setting.UseProxyProtocol)
+ case setting.FCGIUnix:
+ if handleRedirector {
+ NoHTTPRedirector()
+ }
+ err = runFCGI("unix", listenAddr, "Web", m, setting.UseProxyProtocol)
+ default:
+ log.Fatal("Invalid protocol: %s", setting.Protocol)
+ }
+ if err != nil {
+ log.Critical("Failed to start server: %v", err)
+ }
+ log.Info("HTTP Listener: %s Closed", listenAddr)
+ return err
+}
diff --git a/cmd/web_acme.go b/cmd/web_acme.go
new file mode 100644
index 0000000..90e4a02
--- /dev/null
+++ b/cmd/web_acme.go
@@ -0,0 +1,135 @@
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "crypto/x509"
+ "encoding/pem"
+ "fmt"
+ "net/http"
+ "os"
+ "strconv"
+ "strings"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/process"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/caddyserver/certmagic"
+)
+
+func getCARoot(path string) (*x509.CertPool, error) {
+ r, err := os.ReadFile(path)
+ if err != nil {
+ return nil, err
+ }
+ block, _ := pem.Decode(r)
+ if block == nil {
+ return nil, fmt.Errorf("no PEM found in the file %s", path)
+ }
+ caRoot, err := x509.ParseCertificate(block.Bytes)
+ if err != nil {
+ return nil, err
+ }
+ certPool := x509.NewCertPool()
+ certPool.AddCert(caRoot)
+ return certPool, nil
+}
+
+func runACME(listenAddr string, m http.Handler) error {
+ // If the HTTP challenge is enabled, the server needs to be reachable on port 80; the TLS-ALPN challenge needs port 443.
+ // Due to Docker port mapping this can't be checked programmatically.
+ // TODO: these are placeholders until options for each are added to the settings, with appropriate warnings
+ enableHTTPChallenge := true
+ enableTLSALPNChallenge := true
+ altHTTPPort := 0
+ altTLSALPNPort := 0
+
+ if p, err := strconv.Atoi(setting.PortToRedirect); err == nil {
+ altHTTPPort = p
+ }
+ if p, err := strconv.Atoi(setting.HTTPPort); err == nil {
+ altTLSALPNPort = p
+ }
+
+ magic := certmagic.NewDefault()
+ magic.Storage = &certmagic.FileStorage{Path: setting.AcmeLiveDirectory}
+ // Try to use the private CA root if provided, otherwise fall back to the system trust store
+ var certPool *x509.CertPool
+ if setting.AcmeCARoot != "" {
+ var err error
+ certPool, err = getCARoot(setting.AcmeCARoot)
+ if err != nil {
+ log.Warn("Failed to parse CA Root certificate, using default CA trust: %v", err)
+ }
+ }
+ myACME := certmagic.NewACMEIssuer(magic, certmagic.ACMEIssuer{
+ CA: setting.AcmeURL,
+ TrustedRoots: certPool,
+ Email: setting.AcmeEmail,
+ Agreed: setting.AcmeTOS,
+ DisableHTTPChallenge: !enableHTTPChallenge,
+ DisableTLSALPNChallenge: !enableTLSALPNChallenge,
+ ListenHost: setting.HTTPAddr,
+ AltTLSALPNPort: altTLSALPNPort,
+ AltHTTPPort: altHTTPPort,
+ })
+
+ magic.Issuers = []certmagic.Issuer{myACME}
+
+ // this obtains certificates or renews them if necessary
+ err := magic.ManageSync(graceful.GetManager().HammerContext(), []string{setting.Domain})
+ if err != nil {
+ return err
+ }
+
+ tlsConfig := magic.TLSConfig()
+ tlsConfig.NextProtos = append(tlsConfig.NextProtos, "h2")
+
+ if version := toTLSVersion(setting.SSLMinimumVersion); version != 0 {
+ tlsConfig.MinVersion = version
+ }
+ if version := toTLSVersion(setting.SSLMaximumVersion); version != 0 {
+ tlsConfig.MaxVersion = version
+ }
+
+ // Set curve preferences
+ if curves := toCurvePreferences(setting.SSLCurvePreferences); len(curves) > 0 {
+ tlsConfig.CurvePreferences = curves
+ }
+
+ // Set cipher suites
+ if ciphers := toTLSCiphers(setting.SSLCipherSuites); len(ciphers) > 0 {
+ tlsConfig.CipherSuites = ciphers
+ }
+
+ if enableHTTPChallenge {
+ go func() {
+ _, _, finished := process.GetManager().AddTypedContext(graceful.GetManager().HammerContext(), "Web: ACME HTTP challenge server", process.SystemProcessType, true)
+ defer finished()
+
+ log.Info("Running Let's Encrypt handler on %s", setting.HTTPAddr+":"+setting.PortToRedirect)
+ // all traffic coming in over HTTP is redirected to HTTPS automatically (LE HTTP-01 validation happens here)
+ err := runHTTP("tcp", setting.HTTPAddr+":"+setting.PortToRedirect, "Let's Encrypt HTTP Challenge", myACME.HTTPChallengeHandler(http.HandlerFunc(runLetsEncryptFallbackHandler)), setting.RedirectorUseProxyProtocol)
+ if err != nil {
+ log.Fatal("Failed to start the Let's Encrypt handler on port %s: %v", setting.PortToRedirect, err)
+ }
+ }()
+ }
+
+ return runHTTPSWithTLSConfig("tcp", listenAddr, "Web", tlsConfig, m, setting.UseProxyProtocol, setting.ProxyProtocolTLSBridging)
+}
+
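+// runLetsEncryptFallbackHandler answers requests on the plain-HTTP port that are not ACME
+// HTTP-01 challenges: GET and HEAD requests are redirected to the HTTPS AppURL, anything
+// else is rejected with "400 Use HTTPS".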
+func runLetsEncryptFallbackHandler(w http.ResponseWriter, r *http.Request) {
+ if r.Method != "GET" && r.Method != "HEAD" {
+ http.Error(w, "Use HTTPS", http.StatusBadRequest)
+ return
+ }
+ // Remove the trailing slash from setting.AppURL; the request URI always
+ // contains a leading slash, which would otherwise result in a double slash.
+ target := strings.TrimSuffix(setting.AppURL, "/") + r.URL.RequestURI()
+ http.Redirect(w, r, target, http.StatusTemporaryRedirect)
+}
diff --git a/cmd/web_graceful.go b/cmd/web_graceful.go
new file mode 100644
index 0000000..996537b
--- /dev/null
+++ b/cmd/web_graceful.go
@@ -0,0 +1,55 @@
+// Copyright 2016 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "net"
+ "net/http"
+ "net/http/fcgi"
+ "strings"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+)
+
+func runHTTP(network, listenAddr, name string, m http.Handler, useProxyProtocol bool) error {
+ return graceful.HTTPListenAndServe(network, listenAddr, name, m, useProxyProtocol)
+}
+
+// NoHTTPRedirector tells our cleanup routine that we will not be using a fallback http redirector
+func NoHTTPRedirector() {
+ graceful.GetManager().InformCleanup()
+}
+
+// NoMainListener tells our cleanup routine that we will not be using a possibly provided listener
+// for our main HTTP/HTTPS service
+func NoMainListener() {
+ graceful.GetManager().InformCleanup()
+}
+
+// NoInstallListener tells our cleanup routine that we will not be using a possibly provided listener
+// for our install HTTP/HTTPS service
+func NoInstallListener() {
+ graceful.GetManager().InformCleanup()
+}
+
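+// runFCGI serves the handler over FastCGI on the given listener, stripping the configured
+// AppSubURL prefix from request paths before dispatching to the router.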
+func runFCGI(network, listenAddr, name string, m http.Handler, useProxyProtocol bool) error {
+ // This needs to be able to handle stdin as the FCGI entry point
+ fcgiServer := graceful.NewServer(network, listenAddr, name)
+
+ err := fcgiServer.ListenAndServe(func(listener net.Listener) error {
+ return fcgi.Serve(listener, http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
+ if setting.AppSubURL != "" {
+ req.URL.Path = strings.TrimPrefix(req.URL.Path, setting.AppSubURL)
+ }
+ m.ServeHTTP(resp, req)
+ }))
+ }, useProxyProtocol)
+ if err != nil {
+ log.Fatal("Failed to start FCGI main server: %v", err)
+ }
+ log.Info("FCGI Listener: %s Closed", listenAddr)
+ return err
+}
diff --git a/cmd/web_https.go b/cmd/web_https.go
new file mode 100644
index 0000000..70d35cd
--- /dev/null
+++ b/cmd/web_https.go
@@ -0,0 +1,191 @@
+// Copyright 2021 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package cmd
+
+import (
+ "crypto/tls"
+ "net/http"
+ "os"
+ "strings"
+
+ "code.gitea.io/gitea/modules/graceful"
+ "code.gitea.io/gitea/modules/log"
+ "code.gitea.io/gitea/modules/setting"
+
+ "github.com/klauspost/cpuid/v2"
+)
+
+var tlsVersionStringMap = map[string]uint16{
+ "": tls.VersionTLS12, // Default to tls.VersionTLS12
+ "tlsv1.0": tls.VersionTLS10,
+ "tlsv1.1": tls.VersionTLS11,
+ "tlsv1.2": tls.VersionTLS12,
+ "tlsv1.3": tls.VersionTLS13,
+}
+
+func toTLSVersion(version string) uint16 {
+ tlsVersion, ok := tlsVersionStringMap[strings.TrimSpace(strings.ToLower(version))]
+ if !ok {
+ log.Warn("Unknown tls version: %s", version)
+ return 0
+ }
+ return tlsVersion
+}
+
+var curveStringMap = map[string]tls.CurveID{
+ "x25519": tls.X25519,
+ "p256": tls.CurveP256,
+ "p384": tls.CurveP384,
+ "p521": tls.CurveP521,
+}
+
+func toCurvePreferences(preferences []string) []tls.CurveID {
+ ids := make([]tls.CurveID, 0, len(preferences))
+ for _, pref := range preferences {
+ id, ok := curveStringMap[strings.TrimSpace(strings.ToLower(pref))]
+ if !ok {
+ log.Warn("Unknown curve: %s", pref)
+ }
+ if id != 0 {
+ ids = append(ids, id)
+ }
+ }
+ return ids
+}
+
+var cipherStringMap = map[string]uint16{
+ "rsa_with_rc4_128_sha": tls.TLS_RSA_WITH_RC4_128_SHA,
+ "rsa_with_3des_ede_cbc_sha": tls.TLS_RSA_WITH_3DES_EDE_CBC_SHA,
+ "rsa_with_aes_128_cbc_sha": tls.TLS_RSA_WITH_AES_128_CBC_SHA,
+ "rsa_with_aes_256_cbc_sha": tls.TLS_RSA_WITH_AES_256_CBC_SHA,
+ "rsa_with_aes_128_cbc_sha256": tls.TLS_RSA_WITH_AES_128_CBC_SHA256,
+ "rsa_with_aes_128_gcm_sha256": tls.TLS_RSA_WITH_AES_128_GCM_SHA256,
+ "rsa_with_aes_256_gcm_sha384": tls.TLS_RSA_WITH_AES_256_GCM_SHA384,
+ "ecdhe_ecdsa_with_rc4_128_sha": tls.TLS_ECDHE_ECDSA_WITH_RC4_128_SHA,
+ "ecdhe_ecdsa_with_aes_128_cbc_sha": tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
+ "ecdhe_ecdsa_with_aes_256_cbc_sha": tls.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
+ "ecdhe_rsa_with_rc4_128_sha": tls.TLS_ECDHE_RSA_WITH_RC4_128_SHA,
+ "ecdhe_rsa_with_3des_ede_cbc_sha": tls.TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA,
+ "ecdhe_rsa_with_aes_128_cbc_sha": tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
+ "ecdhe_rsa_with_aes_256_cbc_sha": tls.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
+ "ecdhe_ecdsa_with_aes_128_cbc_sha256": tls.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
+ "ecdhe_rsa_with_aes_128_cbc_sha256": tls.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
+ "ecdhe_rsa_with_aes_128_gcm_sha256": tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ "ecdhe_ecdsa_with_aes_128_gcm_sha256": tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ "ecdhe_rsa_with_aes_256_gcm_sha384": tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ "ecdhe_ecdsa_with_aes_256_gcm_sha384": tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ "ecdhe_rsa_with_chacha20_poly1305_sha256": tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
+ "ecdhe_ecdsa_with_chacha20_poly1305_sha256": tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
+ "ecdhe_rsa_with_chacha20_poly1305": tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
+ "ecdhe_ecdsa_with_chacha20_poly1305": tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
+ "aes_128_gcm_sha256": tls.TLS_AES_128_GCM_SHA256,
+ "aes_256_gcm_sha384": tls.TLS_AES_256_GCM_SHA384,
+ "chacha20_poly1305_sha256": tls.TLS_CHACHA20_POLY1305_SHA256,
+}
+
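+// toTLSCiphers maps the configured cipher suite names (lower-case, underscore-separated,
+// e.g. "ecdhe_rsa_with_aes_128_gcm_sha256") to their crypto/tls constants; unknown names
+// are logged and skipped.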
+func toTLSCiphers(cipherStrings []string) []uint16 {
+ ciphers := make([]uint16, 0, len(cipherStrings))
+ for _, cipherString := range cipherStrings {
+ cipher, ok := cipherStringMap[strings.TrimSpace(strings.ToLower(cipherString))]
+ if !ok {
+ log.Warn("Unknown cipher: %s", cipherString)
+ }
+ if cipher != 0 {
+ ciphers = append(ciphers, cipher)
+ }
+ }
+
+ return ciphers
+}
+
+// defaultCiphers checks hardware support to determine whether AES is
+// accelerated by the CPU.
+//
+// If AES is supported, AES ciphers are preferred over ChaCha-based ciphers.
+// (This code is directly inspired by the certmagic code.)
+func defaultCiphers() []uint16 {
+ if cpuid.CPU.Supports(cpuid.AESNI) {
+ return defaultCiphersAESfirst
+ }
+ return defaultCiphersChaChaFirst
+}
+
+var (
+ defaultCiphersAES = []uint16{
+ tls.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
+ tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
+ tls.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
+ }
+
+ defaultCiphersChaCha = []uint16{
+ tls.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305,
+ tls.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305,
+ }
+
+ defaultCiphersAESfirst = append(defaultCiphersAES, defaultCiphersChaCha...)
+ defaultCiphersChaChaFirst = append(defaultCiphersChaCha, defaultCiphersAES...)
+)
+
+// runHTTPS listens on the provided network address and then calls
+// Serve to handle requests on incoming TLS connections.
+//
+// Filenames containing a certificate and matching private key for the server must
+// be provided. If the certificate is signed by a certificate authority, the
+// certFile should be the concatenation of the server's certificate followed by the
+// CA's certificate.
+func runHTTPS(network, listenAddr, name, certFile, keyFile string, m http.Handler, useProxyProtocol, proxyProtocolTLSBridging bool) error {
+ tlsConfig := &tls.Config{}
+ if tlsConfig.NextProtos == nil {
+ tlsConfig.NextProtos = []string{"h2", "http/1.1"}
+ }
+
+ if version := toTLSVersion(setting.SSLMinimumVersion); version != 0 {
+ tlsConfig.MinVersion = version
+ }
+ if version := toTLSVersion(setting.SSLMaximumVersion); version != 0 {
+ tlsConfig.MaxVersion = version
+ }
+
+ // Set curve preferences
+ tlsConfig.CurvePreferences = []tls.CurveID{
+ tls.X25519,
+ tls.CurveP256,
+ }
+ if curves := toCurvePreferences(setting.SSLCurvePreferences); len(curves) > 0 {
+ tlsConfig.CurvePreferences = curves
+ }
+
+ // Set cipher suites
+ tlsConfig.CipherSuites = defaultCiphers()
+ if ciphers := toTLSCiphers(setting.SSLCipherSuites); len(ciphers) > 0 {
+ tlsConfig.CipherSuites = ciphers
+ }
+
+ tlsConfig.Certificates = make([]tls.Certificate, 1)
+
+ certPEMBlock, err := os.ReadFile(certFile)
+ if err != nil {
+ log.Error("Failed to load https cert file %s for %s:%s: %v", certFile, network, listenAddr, err)
+ return err
+ }
+
+ keyPEMBlock, err := os.ReadFile(keyFile)
+ if err != nil {
+ log.Error("Failed to load https key file %s for %s:%s: %v", keyFile, network, listenAddr, err)
+ return err
+ }
+
+ tlsConfig.Certificates[0], err = tls.X509KeyPair(certPEMBlock, keyPEMBlock)
+ if err != nil {
+ log.Error("Failed to create certificate from cert file %s and key file %s for %s:%s: %v", certFile, keyFile, network, listenAddr, err)
+ return err
+ }
+
+ return graceful.HTTPListenAndServeTLSConfig(network, listenAddr, name, tlsConfig, m, useProxyProtocol, proxyProtocolTLSBridging)
+}
+
+func runHTTPSWithTLSConfig(network, listenAddr, name string, tlsConfig *tls.Config, m http.Handler, useProxyProtocol, proxyProtocolTLSBridging bool) error {
+ return graceful.HTTPListenAndServeTLSConfig(network, listenAddr, name, tlsConfig, m, useProxyProtocol, proxyProtocolTLSBridging)
+}